hexsha (stringlengths 40–40) | size (int64 1–1.03M) | ext (stringclasses, 10 values) | lang (stringclasses, 1 value) | max_stars_repo_path (stringlengths 3–239) | max_stars_repo_name (stringlengths 5–130) | max_stars_repo_head_hexsha (stringlengths 40–78) | max_stars_repo_licenses (sequencelengths 1–10) | max_stars_count (int64 1–191k, ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24–24, ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24–24, ⌀) | max_issues_repo_path (stringlengths 3–239) | max_issues_repo_name (stringlengths 5–130) | max_issues_repo_head_hexsha (stringlengths 40–78) | max_issues_repo_licenses (sequencelengths 1–10) | max_issues_count (int64 1–67k, ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24–24, ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24–24, ⌀) | max_forks_repo_path (stringlengths 3–239) | max_forks_repo_name (stringlengths 5–130) | max_forks_repo_head_hexsha (stringlengths 40–78) | max_forks_repo_licenses (sequencelengths 1–10) | max_forks_count (int64 1–105k, ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24–24, ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24–24, ⌀) | content (stringlengths 1–1.03M) | avg_line_length (float64 1–958k) | max_line_length (int64 1–1.03M) | alphanum_fraction (float64 0–1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
793ebca2c5cae0b63b30b277818a31a95c0d9738 | 2,349 | py | Python | utils/makeweights.py | jakobmoss/tsa | ce67c67b03c19d0e5ceec568e95d0f16860b4efe | ["MIT"] | null | null | null | utils/makeweights.py | jakobmoss/tsa | ce67c67b03c19d0e5ceec568e95d0f16860b4efe | ["MIT"] | 36 | 2016-04-18T12:44:14.000Z | 2016-05-26T14:25:47.000Z | utils/makeweights.py | jakobmoss/tsa | ce67c67b03c19d0e5ceec568e95d0f16860b4efe | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Time Series Analysis -- Generate statistical weights from scatter
#
# Author: Jakob Rørsted Mosumgaard
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
###############################################################################
# Modules
###############################################################################
from __future__ import print_function, with_statement, division
import numpy as np
import bottleneck as bn
###############################################################################
# Functions
###############################################################################
def genweight(datname, dpath, wpath):
"""
Combine time series with statistical weights calculated from scatter
Arguments:
- `datname`: Identifier of data file
- `dpath` : Path to data file (time series).
- `wpath` : Path to scatter file (with same time points!)
"""
# Pretty print
print('Generating weights for {0} !'.format(dpath))
# Load data and weights
t, d = np.loadtxt(dpath, unpack=True)
tt, sig = np.loadtxt(wpath, unpack=True)
# Check that times are indeed the same
tdif = t - tt
    if tdif.any():
        print('Error! Not the same time points! Quitting!')
        exit()
# Moving variance (Hans: M = 50 - 100)
M = 70
movstd = bn.move_std(sig, M, min_count=1)
movvar = np.square(movstd)
# Remove first point
x = 1
t = t[x:]
d = d[x:]
movvar = movvar[x:]
# Calculate weights from scatter (1 / variance)
w = np.divide(1.0, movvar)
# Save
    outfile = datname + '_with-weights.txt'
np.savetxt(outfile, np.transpose([t, d, w]), fmt='%.15e', delimiter='\t')
# Done!
print('Done!\n')
###############################################################################
# Script
###############################################################################
if __name__ == "__main__":
# Definitions
datdir = '../../data/'
ext = '.txt'
append = '-high'
# Run for star 1
star = 'star01'
genweight(star, datdir + star + ext, star + append + ext)
# Run for star 2
star = 'star02'
genweight(star, datdir + star + ext, star + append + ext)
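# Usage sketch (assumption: both input files are plain-text two-column tables
# as np.loadtxt expects -- "time value" for the data file and "time scatter"
# for the weight file, on identical time points):
#
#   genweight('mystar', '../../data/mystar.txt', 'mystar-high.txt')
#
# which writes 'mystar_with-weights.txt' containing columns for time, data
# and weight (1 / moving variance).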
| 29 | 79 | 0.443167 |
793ebe7431b0b4d26d2d92102218831ec659f047 | 1,423 | py | Python | bin/SchemaUpgrade/versions/ef699aba8ba4_version_0_65_002.py | karlam123/DBImport | ebaf3f909841276d289bfb2f6eec0ecafa8395cf | ["Apache-2.0"] | 10 | 2019-05-22T04:17:02.000Z | 2021-12-05T16:54:08.000Z | bin/SchemaUpgrade/versions/ef699aba8ba4_version_0_65_002.py | karlam123/DBImport | ebaf3f909841276d289bfb2f6eec0ecafa8395cf | ["Apache-2.0"] | 73 | 2019-05-22T04:19:24.000Z | 2022-01-18T05:09:26.000Z | bin/SchemaUpgrade/versions/ef699aba8ba4_version_0_65_002.py | BerryOsterlund/DBImport | aa5f4599834985266fc0bf211f9bb8b348f6ae8e | ["Apache-2.0"] | 5 | 2020-05-19T23:46:56.000Z | 2021-11-12T12:02:37.000Z |
"""Version 0.65.002
Revision ID: ef699aba8ba4
Revises: c8582887a25f
Create Date: 2019-11-09 07:07:10.795416
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy import Enum
# revision identifiers, used by Alembic.
revision = 'ef699aba8ba4'
down_revision = 'c8582887a25f'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('export_statistics', sa.Column('atlas_schema_duration', sa.Integer(), nullable=True))
op.add_column('export_statistics', sa.Column('atlas_schema_start', sa.DateTime(), nullable=True))
op.add_column('export_statistics', sa.Column('atlas_schema_stop', sa.DateTime(), nullable=True))
op.add_column('export_statistics_last', sa.Column('atlas_schema_duration', sa.Integer(), nullable=True))
op.add_column('export_statistics_last', sa.Column('atlas_schema_start', sa.DateTime(), nullable=True))
op.add_column('export_statistics_last', sa.Column('atlas_schema_stop', sa.DateTime(), nullable=True))
def downgrade():
op.drop_column('export_statistics', 'atlas_schema_duration')
op.drop_column('export_statistics', 'atlas_schema_start')
op.drop_column('export_statistics', 'atlas_schema_stop')
op.drop_column('export_statistics_last', 'atlas_schema_duration')
op.drop_column('export_statistics_last', 'atlas_schema_start')
op.drop_column('export_statistics_last', 'atlas_schema_stop')
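# Usage sketch (assumption: run from a directory containing the project's
# alembic.ini); Alembic applies or reverts this revision via its standard CLI:
#
#   alembic upgrade ef699aba8ba4    # runs upgrade(), adding the atlas_schema_* columns
#   alembic downgrade c8582887a25f  # runs downgrade(), dropping them again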
| 37.447368 | 108 | 0.770204 |
793ec0704b5a9f4f830235e9fcf28262c193a579 | 3,034 | py | Python | scripts/sequence/get_transcript_pep_accordance.py | mahajrod/MAVR | 4db74dff7376a2ffe4426db720b241de9198f329 | ["MIT"] | 10 | 2015-04-28T14:15:04.000Z | 2021-03-15T00:07:38.000Z | scripts/sequence/get_transcript_pep_accordance.py | mahajrod/MAVR | 4db74dff7376a2ffe4426db720b241de9198f329 | ["MIT"] | null | null | null | scripts/sequence/get_transcript_pep_accordance.py | mahajrod/MAVR | 4db74dff7376a2ffe4426db720b241de9198f329 | ["MIT"] | 6 | 2017-03-16T22:38:41.000Z | 2021-08-11T00:22:52.000Z |
#!/usr/bin/env python
__author__ = 'Sergei F. Kliver'
import os
import argparse
from RouToolPa.Routines import SequenceRoutines
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--transcript_file", action="store", dest="transcript_file", required=True,
help="Input file with sequences of transcripts")
parser.add_argument("-p", "--pep_file", action="store", dest="pep_file", required=True,
help="Input file protein sequences")
parser.add_argument("-o", "--output_file", action="store", dest="out", required=True,
help="Output file")
parser.add_argument("-f", "--format", action="store", dest="format", default="fasta",
help="Format of input files. Allowed: fasta, genbank. Default: fasta")
parser.add_argument("-v", "--verbose", action="store_true", dest="verbose",
help="Print warning if no protein was found for CDS")
parser.add_argument("-m", "--parsing_mode", action="store", dest="parsing_mode", default="parse",
help="Parsing mode of sequence files. Allowed: parse, index, index_db."
"Default: parse")
parser.add_argument("-t", "--genetic_code_table", action="store", dest="genetic_code_table", default=1, type=int,
help="Genetic code to use for translation of CDS. "
"Allowed: table number from http://www.ncbi.nlm.nih.gov/Taxonomy/Utils/wprintgc.cgi"
"Default: 1(The standard code)")
parser.add_argument("-d", "--id_check", action="store_true", dest="id_check",
help="Also use id check - if there is id present in both files consider them as accordance")
parser.add_argument("-w", "-transcript_with_no_pep_idfile", action="store", dest="transcript_with_no_pep_idfile",
help="File to write ids of transcripts with no protein hit. Default: not set")
parser.add_argument("-s", "-transcript_with_several_pep_idfile", action="store", dest="transcript_with_several_pep_idfile",
help="File to write ids of transcripts with several protein. Default: not set")
args = parser.parse_args()
SequenceRoutines.get_transcript_to_pep_accordance_from_files(args.transcript_file, args.pep_file, args.out,
verbose=args.verbose,
parsing_mode=args.parsing_mode,
genetic_code_table=args.genetic_code_table,
include_id_check=args.id_check,
transcript_with_no_pep_idfile=args.transcript_with_no_pep_idfile,
transcript_with_several_proteins_idfile=args.transcript_with_several_pep_idfile)
if args.parsing_mode == "index_db":
os.remove("transcript_tmp.idx")
os.remove("pep_tmp.idx")
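# Example invocation (hypothetical file names, for illustration only):
#
#   python get_transcript_pep_accordance.py -c transcripts.fasta \
#          -p proteins.fasta -o accordance.tsv -t 1 -v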
| 61.918367 | 141 | 0.608767 |
793ec0db04d166ac51aa6396ba6e09e09975b567 | 374 | py | Python | LeetCode/May Leetcoding Challenge/Minimum Cost to Connect Sticks.py | UtkarshPathrabe/Competitive-Coding | ba322fbb1b88682d56a9b80bdd92a853f1caa84e | ["MIT"] | 13 | 2021-09-02T07:30:02.000Z | 2022-03-22T19:32:03.000Z | LeetCode/May Leetcoding Challenge/Minimum Cost to Connect Sticks.py | UtkarshPathrabe/Competitive-Coding | ba322fbb1b88682d56a9b80bdd92a853f1caa84e | ["MIT"] | null | null | null | LeetCode/May Leetcoding Challenge/Minimum Cost to Connect Sticks.py | UtkarshPathrabe/Competitive-Coding | ba322fbb1b88682d56a9b80bdd92a853f1caa84e | ["MIT"] | 3 | 2021-08-24T16:06:22.000Z | 2021-09-17T15:39:53.000Z |
import heapq
from typing import List

class Solution:
def connectSticks(self, sticks: List[int]) -> int:
totalCost, pq = 0, []
for stick in sticks:
heapq.heappush(pq, stick)
while len(pq) > 1:
stick1, stick2 = heapq.heappop(pq), heapq.heappop(pq)
heapq.heappush(pq, stick1 + stick2)
totalCost += stick1 + stick2
        return totalCost
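# Worked example (sketch): for sticks [2, 4, 3] the greedy heap combines
# 2+3 = 5 (cost 5), then 5+4 = 9 (cost 9), so the total cost is 14.
#
#   Solution().connectSticks([2, 4, 3])  # -> 14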
| 37.4 | 65 | 0.564171 |
793ec0e43b22791c7abee13229feb4af8a5ed8b4 | 131 | py | Python | src/bit_check.py | mguitar9232/DW_ETL_project | 7df262f03d5164fa156937d2857ace4374e29da0 | ["MIT"] | null | null | null | src/bit_check.py | mguitar9232/DW_ETL_project | 7df262f03d5164fa156937d2857ace4374e29da0 | ["MIT"] | null | null | null | src/bit_check.py | mguitar9232/DW_ETL_project | 7df262f03d5164fa156937d2857ace4374e29da0 | ["MIT"] | null | null | null |
import struct
result = struct.calcsize("P") * 8
if result == 64:
print('## 64')
else:
print('## 32')
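# Equivalent check without struct (a sketch; sys.maxsize is 2**63 - 1 on
# 64-bit CPython builds):
#
#   import sys
#   print('## 64' if sys.maxsize > 2**32 else '## 32')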
| 13.1 | 34 | 0.473282 |
793ec1bd97197b575ddc8bbafd95d40b4837e129 | 10,469 | py | Python | vnpy/trader/app/jaqsService/jsEngine.py | OceanMT/vnpy_py3 | 0901e9381c54e615247eb753bac476a911c9ae5d | ["MIT"] | 11 | 2019-10-28T13:01:48.000Z | 2021-06-20T03:38:09.000Z | vnpy/trader/app/jaqsService/jsEngine.py | Rayshawn8/vnpy_Amerlin-1.1.20 | d701d8f12c29cc33f58ea025920b0c7240f74f82 | ["MIT"] | null | null | null | vnpy/trader/app/jaqsService/jsEngine.py | Rayshawn8/vnpy_Amerlin-1.1.20 | d701d8f12c29cc33f58ea025920b0c7240f74f82 | ["MIT"] | 6 | 2019-10-28T13:16:13.000Z | 2020-09-08T08:03:41.000Z |
# encoding: UTF-8
import json
from collections import defaultdict
from . import jrpc_server
from vnpy.event import Event
from vnpy.trader.vtFunction import getJsonPath
from vnpy.trader.vtObject import VtLogData, VtOrderReq, VtCancelOrderReq
from vnpy.trader.vtConstant import *
EVENT_JS_LOG = 'eJsLog'
ACTION_MAP = {}
ACTION_MAP['Buy'] = (DIRECTION_LONG, OFFSET_OPEN)
ACTION_MAP['Sell'] = (DIRECTION_SHORT, OFFSET_CLOSE)
ACTION_MAP['Short'] = (DIRECTION_SHORT, OFFSET_OPEN)
ACTION_MAP['Cover'] = (DIRECTION_LONG, OFFSET_CLOSE)
ACTION_MAP['CoverYesterday'] = (DIRECTION_LONG, OFFSET_CLOSEYESTERDAY)
ACTION_MAP['SellYesterday'] = (DIRECTION_SHORT, OFFSET_CLOSEYESTERDAY)
ACTION_MAP_REVERSE = {v:k for k,v in list(ACTION_MAP.items())}
STATUS_MAP_REVERSE = {}
STATUS_MAP_REVERSE[STATUS_NOTTRADED] = 'Accepted'
STATUS_MAP_REVERSE[STATUS_PARTTRADED] = 'Accepted'
STATUS_MAP_REVERSE[STATUS_ALLTRADED] = 'Filled'
STATUS_MAP_REVERSE[STATUS_CANCELLED] = 'Cancelled'
STATUS_MAP_REVERSE[STATUS_REJECTED] = 'Rejected'
STATUS_MAP_REVERSE[STATUS_UNKNOWN] = 'New'
EXCHANGE_MAP = {}
EXCHANGE_MAP['SH'] = EXCHANGE_SSE
EXCHANGE_MAP['SZ'] = EXCHANGE_SZSE
EXCHANGE_MAP['CFE'] = EXCHANGE_CFFEX
EXCHANGE_MAP['SHF'] = EXCHANGE_SHFE
EXCHANGE_MAP['DCE'] = EXCHANGE_DCE
EXCHANGE_MAP['CZC'] = EXCHANGE_CZCE
EXCHANGE_MAP_REVERSE = {v:k for k, v in list(EXCHANGE_MAP.items())}
########################################################################
class JsEngine(object):
    """JAQS service engine"""
settingFileName = 'JS_setting.json'
settingfilePath = getJsonPath(settingFileName, __file__)
#----------------------------------------------------------------------
def __init__(self, mainEngine, eventEngine):
"""Constructor"""
self.mainEngine = mainEngine
self.eventEngine = eventEngine
        self.server = None      # RPC server
        self.cbDict = {}        # mapping from RPC method to callback function
        # Register the log event type
        self.mainEngine.registerLogEvent(EVENT_JS_LOG)
        # Initialize
self.initCallback()
self.initServer()
#----------------------------------------------------------------------
    def initCallback(self):
        """Initialize the mapping from RPC methods to callbacks"""
self.cbDict['.sys.heartbeat'] = self.onHeartbeat
self.cbDict['auth.login'] = self.onLogin
self.cbDict['auth.use_strategy'] = self.onUseStrategy
self.cbDict['oms.query_position'] = self.onQueryPosition
self.cbDict['oms.query_order'] = self.onQueryOrder
self.cbDict['oms.place_order'] = self.onPlaceOrder
self.cbDict['oms.cancel_order'] = self.onCancelOrder
#----------------------------------------------------------------------
    def initServer(self):
        """Initialize the RPC server"""
with open(self.settingfilePath) as f:
setting = json.load(f)
host = setting['host']
port = setting['port']
addr = "tcp://%s:%s" %(host, port)
# 初始化RPC服务器
self.server = jrpc_server.JRpcServer()
self.server.on_call = self.onCall
self.server.listen(addr)
        self.writeLog('JAQS server started successfully')
#----------------------------------------------------------------------
    def onCall(self, clientId, req):
        """Callback for incoming RPC requests"""
        method = req['method']
        cb = self.cbDict.get(method, None)
        if not cb:
            self.writeLog('No callback found for method %s' % method)
            return
        self.writeLog('Received request: %s' % req)
        cb(clientId, req)
#----------------------------------------------------------------------
    def onHeartbeat(self, clientId, req):
        """Heartbeat"""
pass
#----------------------------------------------------------------------
    def onLogin(self, clientId, req):
        """Handle a login request"""
params = req['params']
result = {
'username': params['username'],
'name': params['username'],
'strategies': [1],
'broker_strategies': [1]
}
error = [0, '']
self.server.send_rsp(clientId, req, result, error)
        self.writeLog('Sent response: %s' % result)
#----------------------------------------------------------------------
    def onUseStrategy(self, clientId, req):
        """Select the strategy to use"""
result = req['params']['account_id']
error = [0, '']
self.server.send_rsp(clientId, req, result, error)
        self.writeLog('Sent response: %s' % result)
#----------------------------------------------------------------------
    def onQueryPosition(self, clientId, req):
        """Query current positions"""
l = self.mainEngine.getAllPositionDetails()
result = defaultdict(list)
for detail in l:
security = self.converSymbol(detail.vtSymbol)
            # Long positions
if detail.longPos:
result['security'].append(security)
result['side'].append('Long')
result['cost_price'].append(0)
result['float_pnl'].append(0)
result['close_pnl'].append(0)
result['trading_pnl'].append(0)
result['holding_pnl'].append(0)
result['commission'].append(0)
result['init_size'].append(0)
result['current_size'].append(detail.longPos)
result['enable_size'].append(detail.longPos-detail.longPosFrozen)
result['frozen_size'].append(detail.longPosFrozen)
result['uncome_size'].append(0)
result['pre_size'].append(detail.longYd)
result['today_size'].append(detail.longTd)
            # Short positions
if detail.shortPos:
result['security'].append(security)
result['side'].append('Short')
result['cost_price'].append(0)
result['float_pnl'].append(0)
result['close_pnl'].append(0)
result['trading_pnl'].append(0)
result['holding_pnl'].append(0)
result['commission'].append(0)
result['init_size'].append(0)
result['current_size'].append(detail.shortPos)
result['enable_size'].append(detail.shortPos-detail.shortPosFrozen)
result['frozen_size'].append(detail.shortPosFrozen)
result['uncome_size'].append(0)
result['pre_size'].append(detail.shortYd)
result['today_size'].append(detail.shortTd)
error = [0, '']
self.server.send_rsp(clientId, req, result, error)
        self.writeLog('Sent response: %s' % result)
#----------------------------------------------------------------------
    def onQueryOrder(self, clientId, req):
        """Query working orders"""
l = self.mainEngine.getAllOrders()
result = defaultdict(list)
for order in l:
result['task_id'].append(order.vtOrderID)
result['entrust_no'].append(order.vtOrderID)
result['entrust_price'].append(order.price)
result['entrust_size'].append(order.totalVolume)
result['sub_seq'].append(0)
result['sub_total'].append(0)
result['batch_no'].append(0)
result['fill_price'].append(order.price)
result['fill_size'].append(order.tradedVolume)
result['algo'].append('')
result['entrust_action'].append(ACTION_MAP_REVERSE[(order.direction, order.offset)])
result['order_status'].append(STATUS_MAP_REVERSE[order.status])
result['security'].append(self.converSymbol(order.vtSymbol))
hh, mm, ss = order.orderTime.split(':')
result['entrust_time'].append(int(hh)*10000000+
int(mm)*100000+
int(ss)*1000)
error = [0, '']
self.server.send_rsp(clientId, req, result, error)
        self.writeLog('Sent response: %s' % result)
#----------------------------------------------------------------------
    def onPlaceOrder(self, clientId, req):
        """Place an order"""
params = req['params']
s, e = params['security'].split('.')
contract = self.mainEngine.getContract(s)
if not contract:
vtOrderID = ''
            error = [-1, 'Order failed: contract %s not found' % params['security']]
else:
vor = VtOrderReq()
vor.symbol = s
vor.exchange = EXCHANGE_MAP[e]
vor.direction, vor.offset = ACTION_MAP[params['action']]
vor.price = float(params['price'])
vor.volume = int(params['size'])
vor.priceType = PRICETYPE_LIMITPRICE
vtOrderID = self.mainEngine.sendOrder(vor, contract.gatewayName)
error = [0, '']
self.server.send_rsp(clientId, req, vtOrderID, error)
        self.writeLog('Sent response: %s' % vtOrderID)
#----------------------------------------------------------------------
    def onCancelOrder(self, clientId, req):
        """Cancel an order"""
params = req['params']
vtOrderID = params['task_id']
gatewayName, orderID = vtOrderID.split('.')
vcor = VtCancelOrderReq()
vcor.orderID = vtOrderID
self.mainEngine.cancelOrder(vcor, gatewayName)
error = [0, '']
self.server.send_rsp(clientId, req, 'successful', error)
        self.writeLog('Sent response: successful')
#----------------------------------------------------------------------
    def writeLog(self, content):
        """Emit a log event"""
log = VtLogData()
log.logContent = content
log.gatewayName = 'JAQS_SERVICE'
event = Event(type_=EVENT_JS_LOG)
event.dict_['data'] = log
self.eventEngine.put(event)
#----------------------------------------------------------------------
    def converSymbol(self, vtSymbol):
        """Convert a vt symbol to a JAQS security code"""
contract = self.mainEngine.getContract(vtSymbol)
if not contract:
return ''
e = EXCHANGE_MAP_REVERSE[contract.exchange]
        return '.'.join([contract.symbol, e])
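    # Example (sketch): for an SHFE contract with symbol 'rb1905',
    # EXCHANGE_MAP_REVERSE yields 'SHF', so converSymbol returns 'rb1905.SHF'.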
| 36.1 | 96 | 0.503486 |
793ec2230f93cc4d81fd3e6f766b2553e20aa75d | 1,875 | py | Python | ipPanel/app/views.py | zwj2017-NK/NKAMG | fd8590ce2bbc506c2d41cdcee57d41a222fe3dd2 | ["Apache-2.0"] | 2 | 2018-04-25T11:53:25.000Z | 2020-01-11T10:54:12.000Z | ipPanel/app/views.py | zwj2017-NK/NK | 9a90a62ab86954b566b3703e3c2aa4587dc42ee4 | ["Apache-2.0"] | null | null | null | ipPanel/app/views.py | zwj2017-NK/NK | 9a90a62ab86954b566b3703e3c2aa4587dc42ee4 | ["Apache-2.0"] | null | null | null |
from flask import render_template
from flask.ext.appbuilder.models.sqla.interface import SQLAInterface
from flask.ext.appbuilder import ModelView, BaseView, expose, has_access
from app import appbuilder, db
import pandas as pd
import json
"""
Create your Views::
class MyModelView(ModelView):
datamodel = SQLAInterface(MyModel)
Next, register your Views::
appbuilder.add_view(MyModelView, "My View", icon="fa-folder-open-o", category="My Category", category_icon='fa-envelope')
"""
"""
Application wide 404 error handler
"""
@appbuilder.app.errorhandler(404)
def page_not_found(e):
return render_template('404.html', base_template=appbuilder.base_template, appbuilder=appbuilder), 404
class IpManagerView(BaseView):
default_view = "panel"
@expose('/panel')
@has_access
def panel(self):
return self.render_template("ManagingPanel.html")
@expose('/data')
@has_access
def data(self):
df = pd.read_csv("csv/heatmap.csv")
df = df[["src_ip"]]
        df = df.drop_duplicates(["src_ip"])
return json.dumps({"maskbits": 6, "ipranges": [{"beginip": "10.79.196.0", "endip": "10.79.196.255"}],
"groupnames": {"10.79.196.0": "Beijing1", "10.79.196.64": "Beijing2", "10.79.196.128": "Tianjin1", "10.79.196.192": "Tianjin2", "-1": "Other"},
"grouptree": {'text':'Show all', 'nodes':[
{'text':'Beijing Station', 'nodes':[
{'text':'Beijing1'},
{'text':'Beijing2'}
]},
{'text':'Tianjin', 'nodes':[
{'text':'Tianjin1'},
{'text':'Tianjin2'}
]},
{'text':'Other'}
]},
"list": df.to_dict(orient='records')})
appbuilder.add_view(IpManagerView, "IP Manager")
db.create_all()
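# Endpoint sketch (assumption: Flask-AppBuilder exposes BaseViews under the
# lower-cased class name):
#
#   GET /ipmanagerview/panel  -> renders ManagingPanel.html
#   GET /ipmanagerview/data   -> JSON with maskbits, ipranges, groupnames,
#                                grouptree and the per-record ip list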
| 28.846154 | 155 | 0.5872 |
793ec2a0274acc973281425f0cc8da2d04ba0ed0 | 13,489 | py | Python | IRIS_data_download/IRIS_download_support/obspy/clients/seedlink/slclient.py | earthinversion/Fnet_IRIS_data_automated_download | 09a6e0c992662feac95744935e038d1c68539fa1 | ["MIT"] | 2 | 2020-03-05T01:03:01.000Z | 2020-12-17T05:04:07.000Z | IRIS_data_download/IRIS_download_support/obspy/clients/seedlink/slclient.py | earthinversion/Fnet_IRIS_data_automated_download | 09a6e0c992662feac95744935e038d1c68539fa1 | ["MIT"] | 4 | 2021-03-31T19:25:55.000Z | 2021-12-13T20:32:46.000Z | IRIS_data_download/IRIS_download_support/obspy/clients/seedlink/slclient.py | earthinversion/Fnet_IRIS_data_automated_download | 09a6e0c992662feac95744935e038d1c68539fa1 | ["MIT"] | 2 | 2020-09-08T19:33:40.000Z | 2021-04-05T09:47:50.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module to create and use a connection to a SeedLink server using a
SeedLinkConnection object.
A new SeedLink application can be created by sub-classing SLClient and
overriding at least the packet_handler method of SLClient.
Part of Python implementation of libslink of Chad Trabant and
JSeedLink of Anthony Lomax
:copyright:
The ObsPy Development Team (devs@obspy.org) & Anthony Lomax
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from future.builtins import * # NOQA
import logging
import sys
import traceback
from .client.seedlinkconnection import SeedLinkConnection
from .seedlinkexception import SeedLinkException
from .slpacket import SLPacket
USAGE = """
## General program options ##
-V report program version
-h show this usage message
-v be more verbose, multiple flags can be used
-p print details of data packets
-nd delay network re-connect delay (seconds), default 30
-nt timeout network timeout (seconds), re-establish connection if no
data/keepalives are received in this time, default 600
-k interval send keepalive (heartbeat) packets this often (seconds)
-x statefile save/restore stream state information to this file
-t begintime sets a beginning time for the initiation of data transmission
(year,month,day,hour,minute,second)
-e endtime sets an end time for windowed data transmission
(year,month,day,hour,minute,second)
-i infolevel request this INFO level, write response to std out, and exit
infolevel is one of: ID, STATIONS, STREAMS, GAPS, CONNECTIONS,
ALL
## Data stream selection ##
-l listfile read a stream list from this file for multi-station mode
-s selectors selectors for uni-station or default for multi-station
-S streams select streams for multi-station (requires SeedLink >= 2.5)
'streams' = 'stream1[:selectors1],stream2[:selectors2],...'
'stream' is in NET_STA format, for example:
-S \"IU_KONO:BHE BHN,GE_WLF,MN_AQU:HH?.D\"
<[host]:port> Address of the SeedLink server in host:port format
if host is omitted (i.e. ':18000'), localhost is assumed
"""
# default logger
logger = logging.getLogger('obspy.clients.seedlink')
class SLClient(object):
"""
Basic class to create and use a connection to a SeedLink server using a
SeedLinkConnection object.
A new SeedLink application can be created by sub-classing SLClient and
overriding at least the packet_handler method of SLClient.
:var slconn: SeedLinkConnection object for communicating with the
SeedLinkConnection over a socket.
:type slconn: SeedLinkConnection
:var verbose: Verbosity level, 0 is lowest.
:type verbose: int
:var ppackets: Flag to indicate show detailed packet information.
:type ppackets: bool
:var streamfile: Name of file containing stream list for multi-station
mode.
:type streamfile: str
:var selectors: Selectors for uni-station or default selectors for
multi-station.
:type selectors: str
:var multiselect: Selectors for multi-station.
:type multiselect: str
:var statefile: Name of file for reading (if exists) and storing state.
:type statefile: str
:var begin_time: Beginning of time window for read start in past.
:type begin_time: str
:var end_time: End of time window for reading windowed data.
:type end_time: str
:var infolevel: INFO LEVEL for info request only.
:type infolevel: str
:type timeout: float
:param timeout: Timeout in seconds, passed on to the underlying
SeedLinkConnection.
"""
VERSION = "1.2.0X00"
VERSION_YEAR = "2011"
VERSION_DATE = "24Nov" + VERSION_YEAR
COPYRIGHT_YEAR = VERSION_YEAR
PROGRAM_NAME = "SLClient v" + VERSION
VERSION_INFO = PROGRAM_NAME + " (" + VERSION_DATE + ")"
def __init__(self, loglevel='DEBUG', timeout=None):
"""
Creates a new instance of SLClient with the specified logging object
"""
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
logger.setLevel(numeric_level)
self.verbose = 0
self.ppackets = False
self.streamfile = None
self.selectors = None
self.multiselect = None
self.statefile = None
self.begin_time = None
self.end_time = None
self.infolevel = None
self.timeout = timeout
self.slconn = SeedLinkConnection(timeout=timeout)
def parse_cmd_line_args(self, args):
"""
Parses the command line arguments.
:type args: list
:param args: main method arguments.
:return: -1 on error, 1 if version or help argument found, 0 otherwise.
"""
if len(args) < 2:
self.print_usage(False)
return 1
optind = 1
while optind < len(args):
if args[optind] == "-V":
print(self.VERSION_INFO, file=sys.stderr)
return 1
elif args[optind] == "-h":
self.print_usage(False)
return 1
elif args[optind].startswith("-v"):
self.verbose += len(args[optind]) - 1
elif args[optind] == "-p":
self.ppackets = True
elif args[optind] == "-nt":
optind += 1
self.slconn.set_net_timeout(int(args[optind]))
elif args[optind] == "-nd":
optind += 1
self.slconn.set_net_delay(int(args[optind]))
elif args[optind] == "-k":
optind += 1
self.slconn.set_keep_alive(int(args[optind]))
elif args[optind] == "-l":
optind += 1
self.streamfile = args[optind]
elif args[optind] == "-s":
optind += 1
self.selectors = args[optind]
elif args[optind] == "-S":
optind += 1
self.multiselect = args[optind]
elif args[optind] == "-x":
optind += 1
self.statefile = args[optind]
elif args[optind] == "-t":
optind += 1
self.begin_time = args[optind]
elif args[optind] == "-e":
optind += 1
self.end_time = args[optind]
elif args[optind] == "-i":
optind += 1
self.infolevel = args[optind]
elif args[optind].startswith("-"):
print("Unknown option: " + args[optind], file=sys.stderr)
return -1
elif self.slconn.get_sl_address() is None:
self.slconn.set_sl_address(args[optind])
else:
print("Unknown option: " + args[optind], file=sys.stderr)
return -1
optind += 1
return 0
def initialize(self):
"""
Initializes this SLClient.
"""
if self.slconn.get_sl_address() is None:
message = "no SeedLink server specified"
raise SeedLinkException(message)
if self.verbose >= 2:
self.ppackets = True
if self.slconn.get_sl_address().startswith(":"):
self.slconn.set_sl_address("127.0.0.1" +
self.slconn.get_sl_address())
if self.streamfile is not None:
self.slconn.read_stream_list(self.streamfile, self.selectors)
if self.multiselect is not None:
self.slconn.parse_stream_list(self.multiselect, self.selectors)
else:
if self.streamfile is None:
self.slconn.set_uni_params(self.selectors, -1, None)
if self.statefile is not None:
self.slconn.set_state_file(self.statefile)
else:
if self.begin_time is not None:
self.slconn.set_begin_time(self.begin_time)
if self.end_time is not None:
self.slconn.set_end_time(self.end_time)
def run(self, packet_handler=None):
"""
Start this SLClient.
:type packet_handler: func
:param packet_handler: Custom packet handler funtion to override
`self.packet_handler` for this seedlink request. The function will
be repeatedly called with two arguments: the current packet counter
(`int`) and the currently served seedlink packet
(:class:`~obspy.clients.seedlink.SLPacket`). The function should
return `True` to abort the request or `False` to continue the
request.
"""
if packet_handler is None:
packet_handler = self.packet_handler
if self.infolevel is not None:
self.slconn.request_info(self.infolevel)
# Loop with the connection manager
count = 1
slpack = self.slconn.collect()
while slpack is not None:
if (slpack == SLPacket.SLTERMINATE):
break
try:
# do something with packet
terminate = packet_handler(count, slpack)
if terminate:
break
except SeedLinkException as sle:
print(self.__class__.__name__ + ": " + sle.value)
if count >= sys.maxsize:
count = 1
print("DEBUG INFO: " + self.__class__.__name__ + ":", end=' ')
print("Packet count reset to 1")
else:
count += 1
slpack = self.slconn.collect()
# Close the SeedLinkConnection
self.slconn.close()
def packet_handler(self, count, slpack):
"""
Processes each packet received from the SeedLinkConnection.
This method should be overridden when sub-classing SLClient.
:type count: int
:param count: Packet counter.
:type slpack: :class:`~obspy.clients.seedlink.slpacket.SLPacket`
:param slpack: packet to process.
:rtype: bool
:return: True if connection to SeedLink server should be closed and
session terminated, False otherwise.
"""
# check if not a complete packet
if slpack is None or (slpack == SLPacket.SLNOPACKET) or \
(slpack == SLPacket.SLERROR):
return False
# get basic packet info
seqnum = slpack.get_sequence_number()
type = slpack.get_type()
# process INFO packets here
if (type == SLPacket.TYPE_SLINF):
return False
if (type == SLPacket.TYPE_SLINFT):
print("Complete INFO:\n" + self.slconn.get_info_string())
if self.infolevel is not None:
return True
else:
return False
# can send an in-line INFO request here
try:
# if (count % 100 == 0 and not self.slconn.state.expect_info):
if (count % 100 == 0):
infostr = "ID"
self.slconn.request_info(infostr)
except SeedLinkException as sle:
print(self.__class__.__name__ + ": " + sle.value)
# if here, must be a data blockette
print(self.__class__.__name__ + ": packet seqnum:", end=' ')
print(str(seqnum) + ": blockette type: " + str(type))
if not self.ppackets:
return False
# process packet data
trace = slpack.get_trace()
if trace is not None:
print(self.__class__.__name__ + ": blockette contains a trace: ")
print(trace.id, trace.stats['starttime'], end=' ')
print(" dt:" + str(1.0 / trace.stats['sampling_rate']), end=' ')
print(" npts:" + str(trace.stats['npts']), end=' ')
print(" sampletype:" + str(trace.stats['sampletype']), end=' ')
print(" dataquality:" + str(trace.stats['dataquality']))
if self.verbose >= 3:
print(self.__class__.__name__ + ":")
print("blockette contains a trace: " + str(trace.stats))
else:
print(self.__class__.__name__ + ": blockette contains no trace")
return False
def print_usage(self, concise=True):
"""
Prints the usage message for this class.
"""
print("\nUsage: python %s [options] <[host]:port>" %
(self.__class__.__name__))
if concise:
usage = "Use '-h' for detailed help"
else:
usage = USAGE
print(usage)
@classmethod
def main(cls, args):
"""
Main method - creates and runs an SLClient using the specified
command line arguments
"""
try:
sl_client = SLClient()
rval = sl_client.parse_cmd_line_args(args)
if (rval != 0):
sys.exit(rval)
sl_client.initialize()
sl_client.run()
except Exception as e:
logger.critical(e)
traceback.print_exc()
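# Subclassing sketch (illustrative only): the module docstring suggests
# overriding packet_handler; a minimal client could look like this, with the
# server address being just an example:
#
#     class MySLClient(SLClient):
#         def packet_handler(self, count, slpack):
#             if slpack is None or slpack in (SLPacket.SLNOPACKET,
#                                             SLPacket.SLERROR):
#                 return False
#             trace = slpack.get_trace()
#             if trace is not None:
#                 print("received trace:", trace.id)
#             return False  # False keeps the connection alive
#
#     MySLClient.main(['slclient', '-S', 'GE_WLF',
#                      'geofon.gfz-potsdam.de:18000'])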
if __name__ == '__main__':
SLClient.main(sys.argv)
| 37.262431 | 79 | 0.58885 |
793ec310629e0df22968dda39e17ba556f81a1d8 | 1,452 | py | Python | curve_fitting.py | Akira794/PreviewController-python | 7b822870680988a2ab28a2177963a14c9b27f291 | ["MIT"] | 2 | 2019-09-05T11:03:20.000Z | 2021-10-09T02:33:17.000Z | curve_fitting.py | Akira794/PreviewController-python | 7b822870680988a2ab28a2177963a14c9b27f291 | ["MIT"] | null | null | null | curve_fitting.py | Akira794/PreviewController-python | 7b822870680988a2ab28a2177963a14c9b27f291 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np # Numerical library
from scipy import * # Load the scipy functions
from control.matlab import * # Load the controls systems library
from matplotlib import pyplot as plt
class CurveFitting():
def __init__(self, _x, _t, _dim):
self.x = _x
self.t = _t
self.dim = _dim
self.W = 0
def calc_polynomial_fitting(self):
        # Build the least-squares normal-equation matrix:
        # A[i, j] = sum(x**(i+j)), with an extra 'dim' on the diagonal
        # (apparently a regularization-style term).
        A = np.zeros((self.dim+1, self.dim+1))
        for i in range(self.dim+1):
            for j in range(self.dim+1):
                temp = np.power(self.x, i+j).sum()
                if i == j:
                    temp += self.dim
                A[i, j] = temp
T = []
for n in range(self.dim+1):
T.append(np.dot(np.power(self.x,n),self.t).sum())
#print(A)
#print(T)
self.W = np.linalg.solve(A,T)
print(self.W)
def out_y(self, x_range):
        # Evaluate the fitted polynomial: sum over k of W[k] * x**k
        result = 0.0
        for k in range(self.dim+1):
            result += self.W[k]*pow(x_range, k)
        return result
"""
x = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]
y = [0.0, 1.0, 2.0, 4.5, 8.0, 12.5, 20.0, 30.5, 35, 40.5, 50]
CF = CurveFitting(x,y,5)
CF.calc_polynomial_fitting()
print(CF.W)
p = []
for i in x:
p.append(CF.out_y(i))
plt.plot(x,y,color="red",label="$Base$")
plt.plot(x,p,color="green",label="$CurveFitting$")
plt.show()
"""
| 26.4 | 67 | 0.52135 |
793ec32b251466125cdcb72dd5f5f3e59bd24b04 | 1,972 | py | Python | pynd/_tests/test_astutils.py | d0ugal/pynd | a08172d5b2232e9eead1a1b23d50c648989f3dfe | ["Apache-2.0"] | 24 | 2016-12-23T10:09:30.000Z | 2021-04-21T05:20:11.000Z | pynd/_tests/test_astutils.py | d0ugal/pynd | a08172d5b2232e9eead1a1b23d50c648989f3dfe | ["Apache-2.0"] | 11 | 2016-12-23T08:00:36.000Z | 2017-03-27T07:50:30.000Z | pynd/_tests/test_astutils.py | d0ugal/pynd | a08172d5b2232e9eead1a1b23d50c648989f3dfe | ["Apache-2.0"] | 3 | 2016-12-28T19:25:19.000Z | 2020-04-26T20:39:53.000Z |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pynd import astutils
def test_is_python():
walker = astutils.ASTWalker([], [])
assert walker._is_python("real_python.py")
def test_is_not_python():
walker = astutils.ASTWalker([], [])
assert not walker._is_python("fake_python.yp")
def test_walk_files_none(tmpdir):
walker = astutils.ASTWalker([], [])
assert list(walker._walk_files(tmpdir.dirname)) == []
def test_walk_files_one(tmpdir):
tmpdir.join("test.py").write("")
tmpdir.join("test.not-py").write("")
walker = astutils.ASTWalker([], [])
assert len(list(walker._walk_files(tmpdir.dirname))) == 1
def test_read(tmpdir):
source = tmpdir.join("test.py")
source.write("testing")
walker = astutils.ASTWalker([], [])
assert walker._read(str(source)) == "testing"
def test_walk(tmpdir):
tmpdir = tmpdir.mkdir("sub")
tmpdir.join("test.py").write("import os")
tmpdir.join("test.not-py").write("")
walker = astutils.ASTWalker([tmpdir.dirname, ], [])
result = list(walker.walk())
name, ast = result[0]
assert len(result) == 1
assert name.endswith("sub/test.py")
assert list(ast), [] # TODO: Wat. How does this pass?
def test_walk_syntax_error(tmpdir):
tmpdir = tmpdir.mkdir("sub")
tmpdir.join("test.py").write("import import")
walker = astutils.ASTWalker([tmpdir.dirname, ], [])
result = list(walker.walk())
assert len(result) == 0
| 27.774648 | 76 | 0.677485 |
793ec3ecf0ebff93bcf4a20e27ecf182b2aeb4f5 | 443 | py | Python | thonnycontrib/circuitpython/api_stubs/framebuf.py | thonny/thonny-circuitpython | b58fef0a0a0d9a3426d00919531a9e43d5f9bcb7 | ["MIT"] | 1 | 2020-02-12T19:37:27.000Z | 2020-02-12T19:37:27.000Z | thonnycontrib/circuitpython/api_stubs/framebuf.py | thonny/thonny-circuitpython | b58fef0a0a0d9a3426d00919531a9e43d5f9bcb7 | ["MIT"] | null | null | null | thonnycontrib/circuitpython/api_stubs/framebuf.py | thonny/thonny-circuitpython | b58fef0a0a0d9a3426d00919531a9e43d5f9bcb7 | ["MIT"] | 1 | 2021-11-17T05:21:50.000Z | 2021-11-17T05:21:50.000Z |
class FrameBuffer:
''
def blit():
pass
def fill():
pass
def fill_rect():
pass
def hline():
pass
def line():
pass
def pixel():
pass
def rect():
pass
def scroll():
pass
def text():
pass
def vline():
pass
def FrameBuffer1():
pass
GS4_HMSB = 2
MONO_HLSB = 3
MONO_HMSB = 4
MONO_VLSB = 0
MVLSB = 0
RGB565 = 1
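# Usage sketch (assumption: this stub mirrors the MicroPython/CircuitPython
# framebuf API, where FrameBuffer takes a buffer, width, height and format):
#
#   buf = bytearray(16 * 8 // 8)             # 16x8 monochrome buffer
#   fb = FrameBuffer(buf, 16, 8, MONO_VLSB)
#   fb.fill(0)
#   fb.text('hi', 0, 0, 1)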
| 10.302326 | 20 | 0.467269 |
793ec439dcf8acdce4d4ba8b5187698c0cf3b6e6 | 4,234 | py | Python | tests/test_format.py | ebadali/ukPostCodes | 80cb470f1c92c21d9c2155711480565cc7acc50a | ["MIT"] | null | null | null | tests/test_format.py | ebadali/ukPostCodes | 80cb470f1c92c21d9c2155711480565cc7acc50a | ["MIT"] | null | null | null | tests/test_format.py | ebadali/ukPostCodes | 80cb470f1c92c21d9c2155711480565cc7acc50a | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from .context import postcodes
import unittest
class PostCodeFormatTestSuite(unittest.TestCase):
    """This test suite tests the formatting of outputs on certain inputs.
    The provided inputs in the test cases cover most of the formats.
Todo:
* add special uk postal code test cases
"""
def shortDescription(self):
"""preventing nose (unittest) from using the docstring"""
return None
def test_format_correct(self):
"""
This test case demonstrates formatting of all possible valid examples:
where postcode
-Ends with 9AA
-contains Areas with only single-digit districts
-contains Areas with only double-digit districts
-contains Areas with a district 0
and certain important cases
-condition#1 all formats end with 9AA
-condition#2 The letters QVX are NOT used in the first position.
-condition#3 The letters IJZ are NOT used in the second position.
-condition#4 The only letters to appear in the third position are ABCDEFGHJKPSTUW when the structure starts with A9A.
-condition#5 The only letters to appear in the fourth position are ABEHMNPRVWXY when the structure starts with AA9A.
-condition#6 The final two letters Does NOT use the letters CIKMOV, so as not to resemble digits or each other when hand-written.
"""
cases = [('AA9A9AA','AA9A 9AA'), ("SW1W0NY","SW1W 0NY"), ("PO167GZ","PO16 7GZ"),
("GU167HF","GU16 7HF"), ("L18JQ","L1 8JQ"),("A9A9AA","A9A 9AA"),("A99AA","A9 9AA"),
("A999AA","A99 9AA"),("AA99AA","AA9 9AA"),("AA999AA","AA99 9AA"),
("EC1A1BB","EC1A 1BB"),("W1A0AX","W1A 0AX"),("M11AE","M1 1AE"),("B338TH", "B33 8TH"),("CR2 6XH","CR2 6XH"),("DN551PT","DN55 1PT")]
for unformated,formated in cases:
self.assertEqual(postcodes.formatPostCode(unformated), formated)
def test_format_incorrect(self):
"""
This test case demonstrates formatting of all possible invalid examples:
Where Postcode,
-does not End with 9AA
-does not contain Areas with only single-digit districts
-does not contain Areas with only double-digit districts
-does not contain Areas with a district 0
and covers major cases like:
-condition#1 all formats dose not end with 9AA
-condition#2 The letters QVX are used in the first position.
-condition#3 The letters IJZ are used in the second position.
-condition#4 The only letters to appear in the third position are NOT ABCDEFGHJKPSTUW when the structure starts with A9A.
-condition#5 The only letters to appear in the fourth position are NOT ABEHMNPRVWXY when the structure starts with AA9A.
-condition#6 The final two letters use the letters CIKMOV, so as not to resemble digits or each other when hand-written.
"""
# condition#2
condition2 = [('QA9A 9AAA','AA9A 9AA'),('VA9A 9AAA','AA9A 9AA'),('XA9A 9AAA','AA9A 9AA')]
# condition#3
condition3 = [('AI9A 9AAA','AA9A 9AA'),('AJ9A 9AAA','AA9A 9AA'),('AZ9A 9AAA','AA9A 9AA')]
# condition#4
# ILMNOQRVXYZ = LETTERS - ABCDEFGHJKPSTUW
condition4 = [("A9"+val+" 9AA","AA9 9AA") for val in "ILMNOQRVXYZ"]
# condition#5
# CDFGIJKLOQSTUZ = LETTERS - ABEHMNPRVWXY
condition5 = [("AA9"+val+" 9AA","AA9A 9AA") for val in "CDFGIJKLOQSTUZ"]
# condition#6
# As it says on final two positions, either one of them would do
condition6 = [("A9A 9"+val+""+val,"AA9A 9AA") for val in "CIKMOV"]
# condition#1 and Generic cases
cases = [("AA999 9AA","A99 9AA"),("A999 9AA","AA9 9AA"),("AAA99 9AA","AA99 9AA"),("A999 9AA","AA99 9AA"),
('AAAA 9AA','AA9A 9AA'),('AA9A 99A','AA9A 9AA'),('AA9A AAA','AA9A 9AA'),('AA9A 9AAA','AA9A 9AA'),
("AA999 9AA","A99 9AA"),("A999 9AA","AA9 9AA"),("AAA99 9AA","AA99 9AA"),("A999 9AA","AA99 9AA"),
('AAAA 9AA','AA9A 9AA'),('AA9A 99A','AA9A 9AA'),('AA9A AAA','AA9A 9AA'),('AA9A 9AAA','AA9A 9AA'),
('QA9A 9AAA','AA9A 9AA'),('VA9A 9AAA','AA9A 9AA'),('XA9A 9AAA','AA9A 9AA')]
cases = cases + condition2 + condition3 +condition4 + condition5 + condition6
for unformated,formated in cases:
with self.assertRaises(postcodes.PostCodeError):
postcodes.formatPostCode(unformated)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(PostCodeFormatTestSuite)
    unittest.TextTestRunner(verbosity=2).run(suite)
| 41.106796 | 135 | 0.700992 |
793ec4e7e35939079862cd406a8e90062bea4cf8 | 9,243 | py | Python | src/api.py | openphacts/openphacts-irs | 170caa5881182a10bc5d7bf3b22ddedd6eb6ad8f | ["MIT"] | 5 | 2016-05-04T06:22:07.000Z | 2021-03-27T09:02:39.000Z | src/api.py | openphacts/ops-search | 170caa5881182a10bc5d7bf3b22ddedd6eb6ad8f | ["MIT"] | 15 | 2015-04-22T16:18:22.000Z | 2017-07-03T11:09:20.000Z | src/api.py | openphacts/openphacts-irs | 170caa5881182a10bc5d7bf3b22ddedd6eb6ad8f | ["MIT"] | 4 | 2016-05-05T07:48:49.000Z | 2021-03-29T15:36:01.000Z |
#!/usr/bin/env python3
import bottle
from bottle import hook, route, run, Bottle, get, post, request, response, static_file, url
from urllib.parse import quote
import os.path
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, Q
from elasticsearch_dsl.query import MultiMatch, Match
from elasticsearch.exceptions import NotFoundError
import yaml
import sys
import json
from rdflib import Graph, plugin
import mimerender
import html
import re
from json import dumps
mimerender.register_mime("turtle", ("text/turtle","text/n3"))
mimerender.register_mime("rdfxml", ("application/rdf+xml", "application/xml"))
mimerender.register_mime("nt", ("application/n-triples",))
#mimerender.register_mime("jsonld", ("application/ld+json",))
produces = mimerender.BottleMimeRender()
conf = {}
def find_static():
cwd = None
try:
cwd = os.path.dirname(__file__)
except NameError:
cwd = "."
return os.path.abspath(os.path.join(cwd, "static"))
static_root = find_static()
es=None
def elasticsearch():
global es
if es is not None:
return es
es_hosts = conf.get("elasticsearch")
es = Elasticsearch(es_hosts)
return es
def es_search(query_string, branch, ops_type, limit, fuzzy):
s = Search(using=elasticsearch(), index=(branch), doc_type=ops_type)
s = s[0:int(limit)]
if fuzzy:
q = Q('multi_match', query=query_string, fields=['label^4', 'title^3', 'prefLabel^4', 'identifier', 'description', 'altLabel^2', 'Synonym', 'Definition', 'shortName', 'mnemonic', 'disease_class'], fuzziness="AUTO", prefix_length=5, type='best_fields', tie_breaker=0.3)
else:
q = Q('multi_match', query=query_string, fields=['label^4', 'title^3', 'prefLabel^4', 'identifier', 'description', 'altLabel^2', 'Synonym', 'Definition', 'shortName', 'mnemonic', 'disease_class'], fuzziness=0, type='best_fields', tie_breaker=0.3)
s = s.highlight('label', 'title', 'identifier', 'description', 'prefLabel', 'altLabel', 'Synonym', 'Definition', 'shortName', 'mnemonic', 'disease_class')
s = s.query(q)
es_response = s.execute()
return es_response.to_dict()
def es_autocomplete(query_string, branch, ops_type, limit):
s = Search(using=elasticsearch(), index=(branch), doc_type=ops_type)
s = s[0:int(limit)]
q = Q('multi_match', analyzer="autocomplete", query=query_string, fields=['label^3', 'title^3', 'prefLabel^3', 'altLabel^2'])
#s = s.highlight('label', 'title', 'identifier', 'description', 'prefLabel', 'description', 'altLabel', 'Synonym', 'Definition')
s = s.query(q)
es_response = s.execute()
return es_response.to_dict()
@hook('after_request')
def enable_cors():
response.headers['Access-Control-Allow-Origin'] = '*'
@get("/")
def index():
return static_file("index.html", static_root)
def render_rdf(doc, format):
g = Graph().parse(data=json.dumps(doc), format="json-ld", publicID=request.url)
return g.serialize(format=format)
def json_pretty(doc):
return json.dumps(doc, indent=4, sort_keys=True)
def html_pre(json):
template = """<!DOCTYPE html><html><body>
<pre>
%s
</pre>
</body></html>
"""
    return template % html.escape(json_pretty(json))
@get("/indexes")
@produces(
default = "json",
#json = lambda **doc: doc,
json = lambda **doc: json_pretty(doc),
jsonld = lambda **doc: json.dumps(doc),
html = lambda **doc: html_pre(doc),
turtle = lambda **doc: render_rdf(doc, "turtle"),
rdfxml = lambda **doc: render_rdf(doc, "xml"),
nt = lambda **doc: render_rdf(doc, "nt")
)
def index_info():
response.content_type = 'application/json'
response.set_header("Access-Control-Allow-Origin", "*")
indexes = []
for index in conf.get("indexes"):
indexes.append(index)
return {"indexes": indexes}
@get("/search")
@get("/search/<query>")
@produces(
default = "json",
#json = lambda **doc: doc,
json = lambda **doc: json_pretty(doc),
jsonld = lambda **doc: json.dumps(doc),
html = lambda **doc: html_pre(doc),
turtle = lambda **doc: render_rdf(doc, "turtle"),
rdfxml = lambda **doc: render_rdf(doc, "xml"),
nt = lambda **doc: render_rdf(doc, "nt")
)
def search_json(query=None):
if query is None:
# Get from ?q parameter instead, if exist
query = request.query.query
branch = request.query.getall("branch")
limit = request.query.limit
ops_type = request.query.type
options = request.query.getall("options")
fuzzy = False
id = quote(url("/search/<query>", query=query))
response.set_header("Content-Location", id)
# CORS header
response.set_header("Access-Control-Allow-Origin", "*")
if options != None and "fuzzy" in options:
fuzzy = True
if limit == "":
limit = "25"
if ops_type == "":
ops_type = None
if branch != "" and not set(branch).issubset(conf["indexes"].keys()):
response.status = 422
return {'error': 'One of your selected branches is not available for searching'}
search = es_search(query, branch, ops_type, limit, fuzzy)
if ops_type == None:
search["type"] = "_all"
else:
search["type"] = ops_type
if branch == "":
search["branch"] = "_all"
else:
search["branch"] = branch
if options != None and "uris_only" in options:
uris = []
for hit in search["hits"]["hits"]:
uris.append(hit["_id"])
return {"uris": uris}
else:
search.pop("_shards", None)
return search
@post("/search")
@produces(
default = "json",
#json = lambda **doc: doc,
json = lambda **doc: json_pretty(doc),
jsonld = lambda **doc: json.dumps(doc),
html = lambda **doc: html_pre(doc),
turtle = lambda **doc: render_rdf(doc, "turtle"),
rdfxml = lambda **doc: render_rdf(doc, "xml"),
nt = lambda **doc: render_rdf(doc, "nt")
)
def search_json_post(query=None):
postdata = request.body.read()
query = None
limit = None
ops_type = None
branch = None
options = None
if "query" in request.json:
query = request.json["query"]
if "limit" in request.json:
limit = request.json["limit"]
if "branch" in request.json:
branch = request.json["branch"]
if "type" in request.json:
ops_type = request.json["type"]
if "options" in request.json:
options = request.json["options"]
fuzzy = False
# CORS header
response.set_header("Access-Control-Allow-Origin", "*")
if options != None and "fuzzy" in options:
fuzzy = True
if limit == None:
limit = "25"
if branch != None and not set(branch).issubset(conf["indexes"].keys()):
response.status = 422
return {'error': 'One of your selected branches is not available for searching'}
search = es_search(query, branch, ops_type, limit, fuzzy)
if ops_type == None:
search["type"] = "_all"
else:
search["type"] = ops_type
if branch == None:
search["branch"] = "_all"
else:
search["branch"] = branch
if options != None and "uris_only" in options:
uris = []
for hit in search["hits"]["hits"]:
uris.append(hit["_id"])
return {"uris": uris}
else:
search.pop("_shards", None)
return search
@get("/autocomplete")
def autocomplete_json():
query = request.query.query
branch = request.query.getall("branch")
limit = request.query.limit
ops_type = request.query.type
#options = request.query.getall("options")
id = quote(url("/search", query=query))
response.set_header("Content-Location", id)
# CORS header
response.set_header("Access-Control-Allow-Origin", "*")
response.content_type = 'application/json'
if limit == "":
limit = "25"
if ops_type == "":
ops_type = None
if branch != "" and not set(branch).issubset(conf["indexes"].keys()):
response.status = 422
return {'error': 'One of your selected branches is not available for searching'}
search = es_autocomplete(query, branch, ops_type, limit)
if ops_type == None:
search["type"] = "_all"
else:
search["type"] = ops_type
if branch == "":
search["branch"] = "_all"
else:
search["branch"] = branch
labels = []
for hit in search["hits"]["hits"]:
if "label" in hit["_source"]:
labels.append(hit["_source"]["label"][0])
elif "title" in hit["_source"]:
labels.append(hit["_source"]["title"][0])
elif "prefLabel" in hit["_source"]:
labels.append(hit["_source"]["prefLabel"][0])
# Remove duplicates
labels = set(labels)
labels = list(labels)
values = []
for label in labels:
values.append({"value": label})
return dumps(values)
def main(config_file, port="8839", *args):
global conf
with open(config_file) as f:
        conf = yaml.safe_load(f)
ws_host = conf["webservice"]["host"]
ws_port = conf["webservice"]["port"]
run(host=ws_host, port=int(ws_port), reloader=True)
if __name__ == "__main__":
main(*sys.argv[1:])
application = bottle.default_app()
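# Request sketch (assumption: the service is running with the host/port from
# the YAML config passed to main(), e.g. a local instance on port 8839):
#
#   curl 'http://localhost:8839/search?query=aspirin&limit=5'
#   curl 'http://localhost:8839/autocomplete?query=asp'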
| 33.248201 | 276 | 0.632046 |
793ec5ae64292319fc7eac188edf419e7f3ad2c0 | 6,052 | py | Python | utils/callbacks.py | kant/rl-baselines3-zoo | 75f9c4c8395d622184177795696973ae37c58ae7 | ["MIT"] | null | null | null | utils/callbacks.py | kant/rl-baselines3-zoo | 75f9c4c8395d622184177795696973ae37c58ae7 | ["MIT"] | null | null | null | utils/callbacks.py | kant/rl-baselines3-zoo | 75f9c4c8395d622184177795696973ae37c58ae7 | ["MIT"] | null | null | null |
import os
import tempfile
import time
from copy import deepcopy
from threading import Thread
from typing import Optional
import optuna
from sb3_contrib import TQC
from stable_baselines3 import SAC
from stable_baselines3.common.callbacks import BaseCallback, EvalCallback
from stable_baselines3.common.vec_env import VecEnv
class TrialEvalCallback(EvalCallback):
"""
Callback used for evaluating and reporting a trial.
"""
def __init__(
self,
eval_env: VecEnv,
trial: optuna.Trial,
n_eval_episodes: int = 5,
eval_freq: int = 10000,
deterministic: bool = True,
verbose: int = 0,
):
super(TrialEvalCallback, self).__init__(
eval_env=eval_env,
n_eval_episodes=n_eval_episodes,
eval_freq=eval_freq,
deterministic=deterministic,
verbose=verbose,
)
self.trial = trial
self.eval_idx = 0
self.is_pruned = False
def _on_step(self) -> bool:
if self.eval_freq > 0 and self.n_calls % self.eval_freq == 0:
super(TrialEvalCallback, self)._on_step()
self.eval_idx += 1
# report best or report current ?
# report num_timesteps or elasped time ?
self.trial.report(self.last_mean_reward, self.eval_idx)
# Prune trial if need
if self.trial.should_prune():
self.is_pruned = True
return False
return True
class SaveVecNormalizeCallback(BaseCallback):
"""
Callback for saving a VecNormalize wrapper every ``save_freq`` steps
:param save_freq: (int)
:param save_path: (str) Path to the folder where ``VecNormalize`` will be saved, as ``vecnormalize.pkl``
:param name_prefix: (str) Common prefix to the saved ``VecNormalize``, if None (default)
only one file will be kept.
"""
def __init__(self, save_freq: int, save_path: str, name_prefix: Optional[str] = None, verbose: int = 0):
super(SaveVecNormalizeCallback, self).__init__(verbose)
self.save_freq = save_freq
self.save_path = save_path
self.name_prefix = name_prefix
def _init_callback(self) -> None:
# Create folder if needed
if self.save_path is not None:
os.makedirs(self.save_path, exist_ok=True)
def _on_step(self) -> bool:
if self.n_calls % self.save_freq == 0:
if self.name_prefix is not None:
path = os.path.join(self.save_path, f"{self.name_prefix}_{self.num_timesteps}_steps.pkl")
else:
path = os.path.join(self.save_path, "vecnormalize.pkl")
if self.model.get_vec_normalize_env() is not None:
self.model.get_vec_normalize_env().save(path)
if self.verbose > 1:
print(f"Saving VecNormalize to {path}")
return True
class ParallelTrainCallback(BaseCallback):
"""
Callback to explore (collect experience) and train (do gradient steps)
at the same time using two separate threads.
Normally used with off-policy algorithms and `train_freq=(1, "episode")`.
TODO:
- blocking mode: wait for the model to finish updating the policy before collecting new experience
at the end of a rollout
- force sync mode: stop training to update to the latest policy for collecting
new experience
:param gradient_steps: Number of gradient steps to do before
sending the new policy
:param verbose: Verbosity level
:param sleep_time: Limit the fps in the thread collecting experience.
"""
def __init__(self, gradient_steps: int = 100, verbose: int = 0, sleep_time: float = 0.0):
super(ParallelTrainCallback, self).__init__(verbose)
self.batch_size = 0
self._model_ready = True
self._model = None
self.gradient_steps = gradient_steps
self.process = None
self.model_class = None
self.sleep_time = sleep_time
def _init_callback(self) -> None:
temp_file = tempfile.TemporaryFile()
self.model.save(temp_file)
# TODO: add support for other algorithms
for model_class in [SAC, TQC]:
if isinstance(self.model, model_class):
self.model_class = model_class
break
assert self.model_class is not None, f"{self.model} is not supported for parallel training"
self._model = self.model_class.load(temp_file)
self.batch_size = self._model.batch_size
# TODO: update SB3 and check train freq instead
# of gradient_steps > 0
self.model.gradient_steps = 1
self.model.tau = 0.0
self.model.learning_rate = 0.0
self.model.batch_size = 1
def train(self) -> None:
self._model_ready = False
self.process = Thread(target=self._train_thread, daemon=True)
self.process.start()
def _train_thread(self) -> None:
self._model.train(gradient_steps=self.gradient_steps, batch_size=self.batch_size)
self._model_ready = True
self.logger.record("train/n_updates_real", self._model._n_updates, exclude="tensorboard")
def _on_step(self) -> bool:
if self.sleep_time > 0:
time.sleep(self.sleep_time)
return True
def _on_rollout_end(self) -> None:
if self._model_ready:
self._model.replay_buffer = deepcopy(self.model.replay_buffer)
self.model.set_parameters(deepcopy(self._model.get_parameters()))
self.model.actor = self.model.policy.actor
if self.num_timesteps >= self._model.learning_starts:
self.train()
# Do not wait for the training loop to finish
# self.process.join()
def _on_training_end(self) -> None:
# Wait for the thread to terminate
if self.process is not None:
if self.verbose > 0:
print("Waiting for training thread to terminate")
self.process.join()
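# Usage sketch (assumptions: standard Stable-Baselines3 usage and the
# Pendulum-v1 gym id; callbacks attach via .learn()):
#
#   from stable_baselines3 import SAC
#   model = SAC('MlpPolicy', 'Pendulum-v1', train_freq=(1, 'episode'))
#   model.learn(10_000, callback=ParallelTrainCallback(gradient_steps=100))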
| 36.02381 | 108 | 0.640449 |
793ec5f0f4c5efbc7b59b1897bd62471a075117f | 29,679 | py | Python | cupy/__init__.py | mnicely/cupy | 9ed1d0db1404e9baa3c139032515d6028078bc91 | ["MIT"] | null | null | null | cupy/__init__.py | mnicely/cupy | 9ed1d0db1404e9baa3c139032515d6028078bc91 | ["MIT"] | null | null | null | cupy/__init__.py | mnicely/cupy | 9ed1d0db1404e9baa3c139032515d6028078bc91 | ["MIT"] | null | null | null |
import functools
import sys
import warnings
import numpy
from cupy import _environment
from cupy import _version
if sys.platform.startswith('win32') and (3, 8) <= sys.version_info: # NOQA
_environment._setup_win32_dll_directory() # NOQA
try:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=ImportWarning,
message='can\'t resolve package from __spec__')
from cupy import core # NOQA
except ImportError as e:
    # core is a c-extension module.
    # When a user cannot import core, it means that CuPy is not correctly
    # built.
exc_info = sys.exc_info()
msg = ('''\
CuPy is not correctly installed.
If you are using wheel distribution (cupy-cudaXX), make sure that the version of CuPy you installed matches with the version of CUDA on your host.
Also, confirm that only one CuPy package is installed:
$ pip freeze
If you are building CuPy from source, please check your environment, uninstall CuPy and reinstall it with:
$ pip install cupy --no-cache-dir -vvvv
Check the Installation Guide for details:
https://docs-cupy.chainer.org/en/latest/install.html
original error: {}'''.format(exc_info[1])) # NOQA
raise ImportError(msg) from e
from cupy import cuda
# Do not make `cupy.cupyx` available because it is confusing.
import cupyx as _cupyx
def is_available():
return cuda.is_available()
__version__ = _version.__version__
from cupy import binary # NOQA
import cupy.core.fusion # NOQA
from cupy import creation # NOQA
from cupy import fft # NOQA
from cupy import functional # NOQA
from cupy import indexing # NOQA
from cupy import io # NOQA
from cupy import linalg # NOQA
from cupy import manipulation # NOQA
from cupy import padding # NOQA
from cupy import polynomial # NOQA
from cupy import random # NOQA
from cupy import _sorting # NOQA
from cupy import sparse # NOQA
from cupy import statistics # NOQA
from cupy import testing  # NOQA
from cupy import util # NOQA
from cupy import lib # NOQA
# import class and function
from cupy.core import ndarray # NOQA
from cupy.core import ufunc # NOQA
# =============================================================================
# Constants (borrowed from NumPy)
# =============================================================================
from numpy import e # NOQA
from numpy import euler_gamma # NOQA
from numpy import Inf # NOQA
from numpy import inf # NOQA
from numpy import Infinity # NOQA
from numpy import infty # NOQA
from numpy import NAN # NOQA
from numpy import NaN # NOQA
from numpy import nan # NOQA
from numpy import newaxis # == None # NOQA
from numpy import NINF # NOQA
from numpy import NZERO # NOQA
from numpy import pi # NOQA
from numpy import PINF # NOQA
from numpy import PZERO # NOQA
# =============================================================================
# Data types (borrowed from NumPy)
#
# The order of these declarations are borrowed from the NumPy document:
# https://docs.scipy.org/doc/numpy/reference/arrays.scalars.html
# =============================================================================
# -----------------------------------------------------------------------------
# Generic types
# -----------------------------------------------------------------------------
from numpy import complexfloating # NOQA
from numpy import floating # NOQA
from numpy import generic # NOQA
from numpy import inexact # NOQA
from numpy import integer # NOQA
from numpy import number # NOQA
from numpy import signedinteger # NOQA
from numpy import unsignedinteger # NOQA
# Not supported by CuPy:
# from numpy import flexible
# from numpy import character
# -----------------------------------------------------------------------------
# Booleans
# -----------------------------------------------------------------------------
from numpy import bool_ # NOQA
from numpy import bool8 # NOQA
# -----------------------------------------------------------------------------
# Integers
# -----------------------------------------------------------------------------
from numpy import byte # NOQA
from numpy import short # NOQA
from numpy import intc # NOQA
from numpy import int_ # NOQA
from numpy import longlong # NOQA
from numpy import intp # NOQA
from numpy import int8 # NOQA
from numpy import int16 # NOQA
from numpy import int32 # NOQA
from numpy import int64 # NOQA
# -----------------------------------------------------------------------------
# Unsigned integers
# -----------------------------------------------------------------------------
from numpy import ubyte # NOQA
from numpy import ushort # NOQA
from numpy import uintc # NOQA
from numpy import uint # NOQA
from numpy import ulonglong # NOQA
from numpy import uintp # NOQA
from numpy import uint8 # NOQA
from numpy import uint16 # NOQA
from numpy import uint32 # NOQA
from numpy import uint64 # NOQA
# -----------------------------------------------------------------------------
# Floating-point numbers
# -----------------------------------------------------------------------------
from numpy import half # NOQA
from numpy import single # NOQA
from numpy import double # NOQA
from numpy import float_ # NOQA
from numpy import longfloat # NOQA
from numpy import float16 # NOQA
from numpy import float32 # NOQA
from numpy import float64 # NOQA
# Not supported by CuPy:
# from numpy import float96
# from numpy import float128
# -----------------------------------------------------------------------------
# Complex floating-point numbers
# -----------------------------------------------------------------------------
from numpy import csingle # NOQA
from numpy import complex_ # NOQA
from numpy import complex64 # NOQA
from numpy import complex128 # NOQA
# Not supported by CuPy:
# from numpy import complex192
# from numpy import complex256
# from numpy import clongfloat
# -----------------------------------------------------------------------------
# Any Python object
# -----------------------------------------------------------------------------
# Not supported by CuPy:
# from numpy import object_
# from numpy import bytes_
# from numpy import unicode_
# from numpy import void
# -----------------------------------------------------------------------------
# Built-in Python types
# -----------------------------------------------------------------------------
from numpy import int # NOQA
from numpy import bool # NOQA
from numpy import float # NOQA
from numpy import complex # NOQA
# Not supported by CuPy:
# from numpy import object
# from numpy import unicode
# from numpy import str
# =============================================================================
# Routines
#
# The order of these declarations are borrowed from the NumPy document:
# https://docs.scipy.org/doc/numpy/reference/routines.html
# =============================================================================
# -----------------------------------------------------------------------------
# Array creation routines
# -----------------------------------------------------------------------------
from cupy.creation.basic import empty # NOQA
from cupy.creation.basic import empty_like # NOQA
from cupy.creation.basic import eye # NOQA
from cupy.creation.basic import full # NOQA
from cupy.creation.basic import full_like # NOQA
from cupy.creation.basic import identity # NOQA
from cupy.creation.basic import ones # NOQA
from cupy.creation.basic import ones_like # NOQA
from cupy.creation.basic import zeros # NOQA
from cupy.creation.basic import zeros_like # NOQA
from cupy.creation.from_data import copy # NOQA
from cupy.creation.from_data import array # NOQA
from cupy.creation.from_data import asanyarray # NOQA
from cupy.creation.from_data import asarray # NOQA
from cupy.creation.from_data import ascontiguousarray # NOQA
from cupy.creation.from_data import fromfile # NOQA
from cupy.creation.ranges import arange # NOQA
from cupy.creation.ranges import linspace # NOQA
from cupy.creation.ranges import logspace # NOQA
from cupy.creation.ranges import meshgrid # NOQA
from cupy.creation.ranges import mgrid # NOQA
from cupy.creation.ranges import ogrid # NOQA
from cupy.creation.matrix import diag # NOQA
from cupy.creation.matrix import diagflat # NOQA
from cupy.creation.matrix import tri # NOQA
from cupy.creation.matrix import tril # NOQA
from cupy.creation.matrix import triu # NOQA
# -----------------------------------------------------------------------------
# Functional routines
# -----------------------------------------------------------------------------
from cupy.functional.piecewise import piecewise # NOQA
# -----------------------------------------------------------------------------
# Array manipulation routines
# -----------------------------------------------------------------------------
from cupy.manipulation.basic import copyto # NOQA
from cupy.manipulation.shape import shape # NOQA
from cupy.manipulation.shape import ravel # NOQA
from cupy.manipulation.shape import reshape # NOQA
from cupy.manipulation.transpose import moveaxis # NOQA
from cupy.manipulation.transpose import rollaxis # NOQA
from cupy.manipulation.transpose import swapaxes # NOQA
from cupy.manipulation.transpose import transpose # NOQA
from cupy.manipulation.dims import atleast_1d # NOQA
from cupy.manipulation.dims import atleast_2d # NOQA
from cupy.manipulation.dims import atleast_3d # NOQA
from cupy.manipulation.dims import broadcast # NOQA
from cupy.manipulation.dims import broadcast_arrays # NOQA
from cupy.manipulation.dims import broadcast_to # NOQA
from cupy.manipulation.dims import expand_dims # NOQA
from cupy.manipulation.dims import squeeze # NOQA
from cupy.manipulation.join import column_stack # NOQA
from cupy.manipulation.join import concatenate # NOQA
from cupy.manipulation.join import dstack # NOQA
from cupy.manipulation.join import hstack # NOQA
from cupy.manipulation.join import stack # NOQA
from cupy.manipulation.join import vstack # NOQA
from cupy.manipulation.kind import asfortranarray # NOQA
from cupy.manipulation.kind import require # NOQA
from cupy.manipulation.split import array_split # NOQA
from cupy.manipulation.split import dsplit # NOQA
from cupy.manipulation.split import hsplit # NOQA
from cupy.manipulation.split import split # NOQA
from cupy.manipulation.split import vsplit # NOQA
from cupy.manipulation.tiling import repeat # NOQA
from cupy.manipulation.tiling import tile # NOQA
from cupy.manipulation.add_remove import unique # NOQA
from cupy.manipulation.add_remove import trim_zeros # NOQA
from cupy.manipulation.rearrange import flip # NOQA
from cupy.manipulation.rearrange import fliplr # NOQA
from cupy.manipulation.rearrange import flipud # NOQA
from cupy.manipulation.rearrange import roll # NOQA
from cupy.manipulation.rearrange import rot90 # NOQA
# -----------------------------------------------------------------------------
# Binary operations
# -----------------------------------------------------------------------------
from cupy.binary.elementwise import bitwise_and # NOQA
from cupy.binary.elementwise import bitwise_or # NOQA
from cupy.binary.elementwise import bitwise_xor # NOQA
from cupy.binary.elementwise import bitwise_not # NOQA
from cupy.binary.elementwise import invert # NOQA
from cupy.binary.elementwise import left_shift # NOQA
from cupy.binary.elementwise import right_shift # NOQA
from cupy.binary.packing import packbits # NOQA
from cupy.binary.packing import unpackbits # NOQA
def binary_repr(num, width=None):
"""Return the binary representation of the input number as a string.
.. seealso:: :func:`numpy.binary_repr`
"""
return numpy.binary_repr(num, width)
# -----------------------------------------------------------------------------
# Data type routines (borrowed from NumPy)
# -----------------------------------------------------------------------------
def can_cast(from_, to, casting='safe'):
"""Returns True if cast between data types can occur according to the
casting rule. If from is a scalar or array scalar, also returns True if the
scalar value can be cast without overflow or truncation to an integer.
.. seealso:: :func:`numpy.can_cast`
"""
from_ = from_.dtype if isinstance(from_, cupy.ndarray) else from_
return numpy.can_cast(from_, to, casting=casting)
def common_type(*arrays):
"""Return a scalar type which is common to the input arrays.
.. seealso:: :func:`numpy.common_type`
"""
if len(arrays) == 0:
return numpy.float16
default_float_dtype = numpy.dtype('float64')
dtypes = []
for a in arrays:
if a.dtype.kind == 'b':
raise TypeError('can\'t get common type for non-numeric array')
elif a.dtype.kind in 'iu':
dtypes.append(default_float_dtype)
else:
dtypes.append(a.dtype)
return functools.reduce(numpy.promote_types, dtypes).type
def result_type(*arrays_and_dtypes):
"""Returns the type that results from applying the NumPy type promotion
rules to the arguments.
.. seealso:: :func:`numpy.result_type`
"""
dtypes = [a.dtype if isinstance(a, cupy.ndarray)
else a for a in arrays_and_dtypes]
return numpy.result_type(*dtypes)
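# Hedged sketch (not part of the original module): tiny self-checks for the
# dtype helpers above; the expected values follow NumPy's documented casting
# and promotion rules.
def _demo_dtype_helpers():
    # 'safe' casting allows widening int32 -> float64 but not the reverse.
    assert can_cast(numpy.int32, numpy.float64)
    assert not can_cast(numpy.float64, numpy.int32)
    # Type promotion picks the smallest dtype covering both inputs.
    assert result_type(numpy.int8, numpy.float32) == numpy.dtype('float32')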
from numpy import min_scalar_type # NOQA
from numpy import obj2sctype # NOQA
from numpy import promote_types # NOQA
from numpy import dtype # NOQA
from numpy import format_parser # NOQA
from numpy import finfo # NOQA
from numpy import iinfo # NOQA
from numpy import MachAr # NOQA
from numpy import find_common_type # NOQA
from numpy import issctype # NOQA
from numpy import issubclass_ # NOQA
from numpy import issubdtype # NOQA
from numpy import issubsctype # NOQA
from numpy import mintypecode # NOQA
from numpy import sctype2char # NOQA
from numpy import typename # NOQA
# -----------------------------------------------------------------------------
# Optionally Scipy-accelerated routines
# -----------------------------------------------------------------------------
# TODO(beam2d): Implement it
# -----------------------------------------------------------------------------
# Discrete Fourier Transform
# -----------------------------------------------------------------------------
# TODO(beam2d): Implement it
# -----------------------------------------------------------------------------
# Indexing routines
# -----------------------------------------------------------------------------
from cupy.indexing.generate import c_ # NOQA
from cupy.indexing.generate import indices # NOQA
from cupy.indexing.generate import ix_ # NOQA
from cupy.indexing.generate import r_ # NOQA
from cupy.indexing.generate import ravel_multi_index # NOQA
from cupy.indexing.generate import unravel_index # NOQA
from cupy.indexing.indexing import choose # NOQA
from cupy.indexing.indexing import compress # NOQA
from cupy.indexing.indexing import diagonal # NOQA
from cupy.indexing.indexing import extract # NOQA
from cupy.indexing.indexing import select # NOQA
from cupy.indexing.indexing import take # NOQA
from cupy.indexing.indexing import take_along_axis # NOQA
from cupy.indexing.insert import place # NOQA
from cupy.indexing.insert import put # NOQA
from cupy.indexing.insert import putmask # NOQA
from cupy.indexing.insert import fill_diagonal # NOQA
from cupy.indexing.insert import diag_indices # NOQA
from cupy.indexing.insert import diag_indices_from # NOQA
from cupy.indexing.iterate import flatiter # NOQA
# -----------------------------------------------------------------------------
# Input and output
# -----------------------------------------------------------------------------
from cupy.io.npz import load # NOQA
from cupy.io.npz import save # NOQA
from cupy.io.npz import savez # NOQA
from cupy.io.npz import savez_compressed # NOQA
from cupy.io.formatting import array_repr # NOQA
from cupy.io.formatting import array_str # NOQA
def base_repr(number, base=2, padding=0): # NOQA (needed to avoid redefinition of `number`)
"""Return a string representation of a number in the given base system.
.. seealso:: :func:`numpy.base_repr`
"""
return numpy.base_repr(number, base, padding)
# -----------------------------------------------------------------------------
# Linear algebra
# -----------------------------------------------------------------------------
from cupy.linalg.einsum import einsum # NOQA
from cupy.linalg.product import cross # NOQA
from cupy.linalg.product import dot # NOQA
from cupy.linalg.product import inner # NOQA
from cupy.linalg.product import kron # NOQA
from cupy.linalg.product import matmul # NOQA
from cupy.linalg.product import outer # NOQA
from cupy.linalg.product import tensordot # NOQA
from cupy.linalg.product import vdot # NOQA
from cupy.linalg.norms import trace # NOQA
# -----------------------------------------------------------------------------
# Logic functions
# -----------------------------------------------------------------------------
from cupy.logic.comparison import allclose # NOQA
from cupy.logic.comparison import array_equal # NOQA
from cupy.logic.comparison import isclose # NOQA
from cupy.logic.content import isfinite # NOQA
from cupy.logic.content import isinf # NOQA
from cupy.logic.content import isnan # NOQA
from cupy.logic.truth import in1d # NOQA
from cupy.logic.truth import isin # NOQA
from cupy.logic.type_test import iscomplex # NOQA
from cupy.logic.type_test import iscomplexobj # NOQA
from cupy.logic.type_test import isfortran # NOQA
from cupy.logic.type_test import isreal # NOQA
from cupy.logic.type_test import isrealobj # NOQA
def isscalar(element):
"""Returns True if the type of num is a scalar type.
.. seealso:: :func:`numpy.isscalar`
"""
return numpy.isscalar(element)
from cupy.logic.ops import logical_and # NOQA
from cupy.logic.ops import logical_not # NOQA
from cupy.logic.ops import logical_or # NOQA
from cupy.logic.ops import logical_xor # NOQA
from cupy.logic.comparison import equal # NOQA
from cupy.logic.comparison import greater # NOQA
from cupy.logic.comparison import greater_equal # NOQA
from cupy.logic.comparison import less # NOQA
from cupy.logic.comparison import less_equal # NOQA
from cupy.logic.comparison import not_equal # NOQA
from cupy.logic.truth import all # NOQA
from cupy.logic.truth import any # NOQA
# -----------------------------------------------------------------------------
# Mathematical functions
# -----------------------------------------------------------------------------
from cupy.math.trigonometric import arccos # NOQA
from cupy.math.trigonometric import arcsin # NOQA
from cupy.math.trigonometric import arctan # NOQA
from cupy.math.trigonometric import arctan2 # NOQA
from cupy.math.trigonometric import cos # NOQA
from cupy.math.trigonometric import deg2rad # NOQA
from cupy.math.trigonometric import degrees # NOQA
from cupy.math.trigonometric import hypot # NOQA
from cupy.math.trigonometric import rad2deg # NOQA
from cupy.math.trigonometric import radians # NOQA
from cupy.math.trigonometric import sin # NOQA
from cupy.math.trigonometric import tan # NOQA
from cupy.math.trigonometric import unwrap # NOQA
from cupy.math.hyperbolic import arccosh # NOQA
from cupy.math.hyperbolic import arcsinh # NOQA
from cupy.math.hyperbolic import arctanh # NOQA
from cupy.math.hyperbolic import cosh # NOQA
from cupy.math.hyperbolic import sinh # NOQA
from cupy.math.hyperbolic import tanh # NOQA
from cupy.math.rounding import around # NOQA
from cupy.math.rounding import ceil # NOQA
from cupy.math.rounding import fix # NOQA
from cupy.math.rounding import floor # NOQA
from cupy.math.rounding import rint # NOQA
from cupy.math.rounding import round_ # NOQA
from cupy.math.rounding import trunc # NOQA
from cupy.math.sumprod import prod # NOQA
from cupy.math.sumprod import sum # NOQA
from cupy.math.sumprod import cumprod # NOQA
from cupy.math.sumprod import cumsum # NOQA
from cupy.math.sumprod import nansum # NOQA
from cupy.math.sumprod import nanprod # NOQA
from cupy.math.sumprod import diff # NOQA
from cupy.math.window import bartlett # NOQA
from cupy.math.window import blackman # NOQA
from cupy.math.window import hamming # NOQA
from cupy.math.window import hanning # NOQA
from cupy.math.window import kaiser # NOQA
from cupy.math.explog import exp # NOQA
from cupy.math.explog import exp2 # NOQA
from cupy.math.explog import expm1 # NOQA
from cupy.math.explog import log # NOQA
from cupy.math.explog import log10 # NOQA
from cupy.math.explog import log1p # NOQA
from cupy.math.explog import log2 # NOQA
from cupy.math.explog import logaddexp # NOQA
from cupy.math.explog import logaddexp2 # NOQA
from cupy.math.special import i0 # NOQA
from cupy.math.special import sinc # NOQA
from cupy.math.floating import copysign # NOQA
from cupy.math.floating import frexp # NOQA
from cupy.math.floating import ldexp # NOQA
from cupy.math.floating import nextafter # NOQA
from cupy.math.floating import signbit # NOQA
from cupy.math.rational import gcd # NOQA
from cupy.math.rational import lcm # NOQA
from cupy.math.arithmetic import add # NOQA
from cupy.math.arithmetic import divide # NOQA
from cupy.math.arithmetic import divmod # NOQA
from cupy.math.arithmetic import floor_divide # NOQA
from cupy.math.arithmetic import fmod # NOQA
from cupy.math.arithmetic import modf # NOQA
from cupy.math.arithmetic import multiply # NOQA
from cupy.math.arithmetic import negative # NOQA
from cupy.math.arithmetic import power # NOQA
from cupy.math.arithmetic import reciprocal # NOQA
from cupy.math.arithmetic import remainder # NOQA
from cupy.math.arithmetic import remainder as mod # NOQA
from cupy.math.arithmetic import subtract # NOQA
from cupy.math.arithmetic import true_divide # NOQA
from cupy.math.arithmetic import angle # NOQA
from cupy.math.arithmetic import conjugate as conj # NOQA
from cupy.math.arithmetic import conjugate # NOQA
from cupy.math.arithmetic import imag # NOQA
from cupy.math.arithmetic import real # NOQA
from cupy.math.misc import absolute as abs # NOQA
from cupy.math.misc import absolute # NOQA
from cupy.math.misc import cbrt # NOQA
from cupy.math.misc import clip # NOQA
from cupy.math.misc import fmax # NOQA
from cupy.math.misc import fmin # NOQA
from cupy.math.misc import maximum # NOQA
from cupy.math.misc import minimum # NOQA
from cupy.math.misc import nan_to_num # NOQA
from cupy.math.misc import sign # NOQA
from cupy.math.misc import sqrt # NOQA
from cupy.math.misc import square # NOQA
# -----------------------------------------------------------------------------
# Miscellaneous routines
# -----------------------------------------------------------------------------
from cupy.misc import may_share_memory # NOQA
from cupy.misc import shares_memory # NOQA
from cupy.misc import who # NOQA
# -----------------------------------------------------------------------------
# Padding
# -----------------------------------------------------------------------------
pad = padding.pad.pad
# -----------------------------------------------------------------------------
# Sorting, searching, and counting
# -----------------------------------------------------------------------------
from cupy._sorting.count import count_nonzero # NOQA
from cupy._sorting.search import argmax # NOQA
from cupy._sorting.search import argmin # NOQA
from cupy._sorting.search import argwhere # NOQA
from cupy._sorting.search import flatnonzero # NOQA
from cupy._sorting.search import nanargmax # NOQA
from cupy._sorting.search import nanargmin # NOQA
from cupy._sorting.search import nonzero # NOQA
from cupy._sorting.search import searchsorted # NOQA
from cupy._sorting.search import where # NOQA
from cupy._sorting.sort import argpartition # NOQA
from cupy._sorting.sort import argsort # NOQA
from cupy._sorting.sort import lexsort # NOQA
from cupy._sorting.sort import msort # NOQA
from cupy._sorting.sort import sort_complex # NOQA
from cupy._sorting.sort import partition # NOQA
from cupy._sorting.sort import sort # NOQA
# -----------------------------------------------------------------------------
# Statistics
# -----------------------------------------------------------------------------
from cupy.statistics.correlation import corrcoef # NOQA
from cupy.statistics.correlation import cov # NOQA
from cupy.statistics.order import amax # NOQA
from cupy.statistics.order import amax as max # NOQA
from cupy.statistics.order import amin # NOQA
from cupy.statistics.order import amin as min # NOQA
from cupy.statistics.order import nanmax # NOQA
from cupy.statistics.order import nanmin # NOQA
from cupy.statistics.order import percentile # NOQA
from cupy.statistics.order import ptp # NOQA
from cupy.statistics.meanvar import median # NOQA
from cupy.statistics.meanvar import average # NOQA
from cupy.statistics.meanvar import mean # NOQA
from cupy.statistics.meanvar import std # NOQA
from cupy.statistics.meanvar import var # NOQA
from cupy.statistics.meanvar import nanmean # NOQA
from cupy.statistics.meanvar import nanstd # NOQA
from cupy.statistics.meanvar import nanvar # NOQA
from cupy.statistics.histogram import bincount # NOQA
from cupy.statistics.histogram import digitize # NOQA
from cupy.statistics.histogram import histogram # NOQA
# -----------------------------------------------------------------------------
# Undocumented functions
# -----------------------------------------------------------------------------
from cupy.core import size # NOQA
# -----------------------------------------------------------------------------
# CuPy specific functions
# -----------------------------------------------------------------------------
from cupy.util import clear_memo # NOQA
from cupy.util import memoize # NOQA
from cupy.core import ElementwiseKernel # NOQA
from cupy.core import RawKernel # NOQA
from cupy.core import RawModule # NOQA
from cupy.core._reduction import ReductionKernel # NOQA
# -----------------------------------------------------------------------------
# DLPack
# -----------------------------------------------------------------------------
from cupy.core import fromDlpack # NOQA
def asnumpy(a, stream=None, order='C'):
"""Returns an array on the host memory from an arbitrary source array.
Args:
a: Arbitrary object that can be converted to :class:`numpy.ndarray`.
stream (cupy.cuda.Stream): CUDA stream object. If it is specified, then
the device-to-host copy runs asynchronously. Otherwise, the copy is
synchronous. Note that if ``a`` is not a :class:`cupy.ndarray`
object, then this argument has no effect.
order ({'C', 'F', 'A'}): The desired memory layout of the host
array. When ``order`` is 'A', it uses 'F' if ``a`` is
fortran-contiguous and 'C' otherwise.
Returns:
numpy.ndarray: Converted array on the host memory.
"""
if isinstance(a, ndarray):
return a.get(stream=stream, order=order)
else:
return numpy.asarray(a, order=order)
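# Hedged sketch (not part of the original module): a device -> host round
# trip using the helpers above; a CUDA device is required when the function
# is actually called.
def _demo_asnumpy_roundtrip():
    x_gpu = arange(3, dtype=float32)   # cupy.ndarray on the device
    x_cpu = asnumpy(x_gpu)             # numpy.ndarray on the host
    assert isinstance(x_cpu, numpy.ndarray)
    assert (x_cpu == numpy.arange(3, dtype=numpy.float32)).all()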
_cupy = sys.modules[__name__]
def get_array_module(*args):
"""Returns the array module for arguments.
This function is used to implement CPU/GPU generic code. If at least one of
the arguments is a :class:`cupy.ndarray` object, the :mod:`cupy` module is
returned.
Args:
args: Values to determine whether NumPy or CuPy should be used.
Returns:
module: :mod:`cupy` or :mod:`numpy` is returned based on the types of
the arguments.
.. admonition:: Example
A NumPy/CuPy generic function can be written as follows
>>> def softplus(x):
... xp = cupy.get_array_module(x)
... return xp.maximum(0, x) + xp.log1p(xp.exp(-abs(x)))
"""
for arg in args:
if isinstance(arg, (ndarray, sparse.spmatrix,
cupy.core.fusion._FusionVarScalar,
cupy.core.fusion._FusionVarArray)):
return _cupy
return numpy
fuse = cupy.core.fusion.fuse
disable_experimental_feature_warning = False
# set default allocator
_default_memory_pool = cuda.MemoryPool()
_default_pinned_memory_pool = cuda.PinnedMemoryPool()
cuda.set_allocator(_default_memory_pool.malloc)
cuda.set_pinned_memory_allocator(_default_pinned_memory_pool.malloc)
def get_default_memory_pool():
"""Returns CuPy default memory pool for GPU memory.
Returns:
cupy.cuda.MemoryPool: The memory pool object.
.. note::
If you want to disable memory pool, please use the following code.
>>> cupy.cuda.set_allocator(None)
"""
return _default_memory_pool
def get_default_pinned_memory_pool():
"""Returns CuPy default memory pool for pinned memory.
Returns:
cupy.cuda.PinnedMemoryPool: The memory pool object.
.. note::
If you want to disable memory pool, please use the following code.
>>> cupy.cuda.set_pinned_memory_allocator(None)
"""
return _default_pinned_memory_pool
def show_config():
"""Prints the current runtime configuration to standard output."""
sys.stdout.write(str(_cupyx.get_runtime_info()))
sys.stdout.flush()
| 35.248219 | 146 | 0.631794 |
793ec6f46b21b0c4aa4bea57dc3794605265afdd | 9,030 | py | Python | poseidon/ui/mobile/common/Init.py | peterkang2001/Poseidon | cfafc01a1f69210dbfd95a0c62e06269eb599034 | [
"Apache-2.0"
] | 2 | 2019-12-27T09:14:38.000Z | 2019-12-27T09:16:29.000Z | poseidon/ui/mobile/common/Init.py | CodeMonkey4Fun/Poseidon | cfafc01a1f69210dbfd95a0c62e06269eb599034 | [
"Apache-2.0"
] | 2 | 2021-03-31T19:35:12.000Z | 2021-12-13T20:35:33.000Z | poseidon/ui/mobile/common/Init.py | peterkang2001/Poseidon | cfafc01a1f69210dbfd95a0c62e06269eb599034 | [
"Apache-2.0"
] | 1 | 2020-11-13T07:37:01.000Z | 2020-11-13T07:37:01.000Z | # coding=utf-8
"""
@author:songmengyun
@file: Init.py
@time: 2020/01/08
"""
import json
import logging
import subprocess
import sys
import os
import redis
import requests
import time
class Init(object):
def _execute(self, method=None, url=None, data=None, json_data=None, timeout=30, serial=None):
result = {}
maxtime = time.time() + float(timeout)
while time.time() < maxtime:
if "GET" == method:
try:
result = json.loads(requests.get(url, timeout=timeout).text)
except Exception as e:
logging.debug("URL is %s, result is %s" % (url, e))
if str(e).find('timeout') > -1:
self.ip = self.get_ip(serial)
result = {"status": "error",
"value": "连接异常,请检查设备:'{0}'端服务是否成功启动,或者检查设备是否连上'hujiang'无线网".format(serial)}
time.sleep(1)
continue
elif "POST" == method:
try:
result = json.loads(requests.post(url, timeout=timeout, data=data, json=json_data).text)
except Exception as e:
logging.debug("URL is %s, result is %s" % (url, e))
if str(e).find('timeout') > -1:
self.ip = self.get_ip(serial)
result = {"status": "error",
"value": "连接异常,请检查设备:'{0}'端服务是否成功启动,或者检查设备是否连上'hujiang'无线网".format(serial)}
time.sleep(1)
continue
elif "DELETE" == method:
try:
result = json.loads(requests.delete(url, timeout=timeout).text)
except Exception as e:
logging.debug("URL is %s, result is %s" % (url, e))
if str(e).find('timeout') > -1:
self.ip = self.get_ip(serial)
result = {"status": "error",
"value": "连接异常,请检查设备:'{0}'端服务是否成功启动,或者检查设备是否连上'hujiang'无线网".format(serial)}
time.sleep(1)
continue
if not result['value']:
time.sleep(1)
continue
else:
break
log_msg = str(result)
if len(log_msg) > 300:
log_msg = log_msg[0:300]
logging.info("URL is %s, result is %s" % (url, log_msg))
return result
@staticmethod
def shell(cmd):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True, stderr=subprocess.PIPE) # , close_fds=True)
out, err = p.communicate()
if p.returncode != 0:
            logging.warning('please make sure you have installed the target cmd on your computer to execute "{0}", OR the device is not supported !!!'.format(cmd))
# sys.exit(0)
return "error"
return out.decode('utf-8')
def get_devices(self):
cmd = 'adb devices'
try:
data = self.shell(cmd)
data = data.strip('List of devices attached').split()
device_list = [x for x in data if x != 'device']
if len(device_list) > 0:
return device_list
else:
logging.info('No devices list, please start a deivce first!')
return []
except Exception as e:
print(e)
def start_appium_server(self):
print()
def start_android_server(self, serial):
count = 5
cmd = "adb -s {0} shell exec nohup app_process -Djava.class.path=/data/local/tmp/app-process.apk /system/bin com.yyl.android.Main >/dev/null 2>&1 &".format(serial)
self.shell(cmd)
time.sleep(5)
ip = self.get_ip(serial)
while self.status_android_server(ip, serial)['status'] == 'error' and count > 0:
count = count - 1
self.shell(cmd)
@staticmethod
def download(url, target_file_path):
r = requests.get(url, stream=True)
with open(target_file_path, "wb") as pdf:
for chunk in r.iter_content(chunk_size=1024):
if chunk:
pdf.write(chunk)
def start_uiautomator_ui(self, serial):
logging.info("启动uiautomator ui")
return self.shell(
cmd="am start -a android.intent.action.VIEW -n com.yyl.android.hj.auto/com.yyl.android.hj.auto.MainActivity -W")
def get_ip(self, serial):
self.start_uiautomator_ui(serial)
r = redis.Redis(host='192.168.160.188', port=6379, db=15)
device_detail = r.get(serial)
device_detail = json.loads(device_detail.decode('utf-8'))
ip = device_detail['host']
if not ip:
print("检查设备{0}是否连上'hujiang'无线网")
return ''
else:
return ip
def start_hj_auto(self, serial):
cmd = "adb -s {0} shell am start -a android.intent.action.VIEW -n com.yyl.android.hj.auto/com.yyl.android.hj.auto.MainActivity -W".format(
serial)
self.shell(cmd)
def status_android_server(self, ip, serial):
logging.info("通过ip检查设备:{0} 60002服务是否成功启动".format(serial))
return self._execute(method="GET", url="http://{0}:{1}/health".format(ip, 60002), serial=serial)
    def reboot(self, num):
        # Reboot every attached device, then wait until at least `num`
        # devices have re-attached.
        devices = self.get_devices()
        for device in devices:
            self.shell("adb -s {0} shell reboot".format(device))
        while True:
            time.sleep(10)
            devices = self.get_devices()
            if len(devices) < num:
                continue
            else:
                time.sleep(15)
                break
if __name__ == '__main__':
try:
        # if len(sys.argv) < 2:
        #     print("Please specify the number of devices to initialize")
        #     sys.exit(0)
        # print("Number of devices to initialize: {0}".format(sys.argv[1]))
init = Init()
devices = init.get_devices()
        # if len(devices) < int(sys.argv[1]):
        #     print("Please check that all devices can connect to the computer; currently connected: {0}, requested: {1}".format(len(devices), sys.argv[1]))
        #     sys.exit(0)
path_apk = os.getcwd()
print("apk下载目录 : " + os.getcwd())
print("设备重启中,请稍等片刻...")
init.reboot(int(sys.argv[1]))
print("设备重启执行完成")
print(os.path.abspath(path_apk + "/" + "app-process.apk"))
init.download(url="http://192.168.160.188:8080/download2/app-process.apk",
target_file_path=os.path.abspath(path_apk + "/" + "app-process.apk"))
print("'app-process.apk'下载执行完成")
init.download(
url="http://192.168.160.188:8080/download2/uiautomator2.androidTest.apk",
target_file_path=os.path.abspath(path_apk + "/" + "uiautomator2.androidTest.apk"))
print("'uiautomator2.androidTest.apk'下载执行完成")
init.download(url="http://192.168.160.188:8080/download2/uiautomator2-server.apk",
target_file_path=os.path.abspath(path_apk + "/" + "uiautomator2-server.apk"))
print("'uiautomator2-server.apk'下载执行完成")
init.download(url="http://192.168.160.188:8080/download2/hj_auto.apk",
target_file_path=os.path.abspath(path_apk + "/" + "hj_auto.apk"))
print("'hj_auto.apk'下载执行完成")
for device in devices:
init.shell("adb -s {0} shell pm uninstall io.appium.uiautomator2.server && adb -s {1} shell pm uninstall io.appium.uiautomator2.server.test && adb -s {2} shell pm uninstall com.yyl.android.hj.auto").format(device,device,device)
print(init.shell("adb -s {0} shell input swipe 300 1500 300 0 100".format(device)))
print(init.shell("adb -s {0} shell input keyevent 3".format(device)))
print("准备执行安装'uiautomator2.androidTest.apk'")
init.shell("adb -s {0} push {1} /sdcard/Download/".format(device, os.path.abspath(path_apk + "/" + "uiautomator2.androidTest.apk")))
print(init.shell("adb -s {0} install -r {1}".format(device, os.path.abspath(path_apk + "/" + "uiautomator2.androidTest.apk"))))
print("准备执行安装'uiautomator2-server.apk.apk'")
init.shell("adb -s {0} push {1} /sdcard/Download/".format(device, os.path.abspath(path_apk + "/" + "uiautomator2-server.apk")))
print(init.shell("adb -s {0} install -r {1}".format(device, os.path.abspath(path_apk + "/" + "uiautomator2-server.apk"))))
print("准备执行安装'hj_auto.apk'")
init.shell("adb -s {0} push {1} /sdcard/Download/".format(device, os.path.abspath(path_apk + "/" + "hj_auto.apk")))
print(init.shell("adb -s {0} install -r {1}".format(device, os.path.abspath(path_apk + "/" + "hj_auto.apk"))))
print("准备执行安装'app-process.apk'")
init.shell("adb -s {0} push {1} /data/local/tmp/".format(device, os.path.abspath(path_apk + "/" + "app-process.apk")))
print("准备启动'60002服务'")
init.start_android_server(device)
print("准备打开'hj_auto'")
init.start_hj_auto(device)
except Exception as e:
print(e)
| 40.675676 | 239 | 0.554485 |
793ec74acdca3071cb0a0d92ba72925d3ffa4513 | 9,740 | py | Python | examples/enclave_manager/tcf_enclave_manager/tcf_enclave_bridge.py | manojsalunke85/avalon | 99fc49ac215ac3dfcfb0547f8abebc0b131dfad1 | [
"Apache-2.0"
] | null | null | null | examples/enclave_manager/tcf_enclave_manager/tcf_enclave_bridge.py | manojsalunke85/avalon | 99fc49ac215ac3dfcfb0547f8abebc0b131dfad1 | [
"Apache-2.0"
] | null | null | null | examples/enclave_manager/tcf_enclave_manager/tcf_enclave_bridge.py | manojsalunke85/avalon | 99fc49ac215ac3dfcfb0547f8abebc0b131dfad1 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import time
from ssl import SSLError
from requests.exceptions import Timeout
from requests.exceptions import HTTPError
import tcf_enclave_manager.ias_client as ias_client
import tcf_enclave_manager.tcf_enclave as enclave
import logging
logger = logging.getLogger(__name__)
TCFHOME = os.environ.get("TCF_HOME", "../../../")
send_to_sgx_worker = enclave.HandleWorkOrderRequest
get_enclave_public_info = enclave.UnsealEnclaveData
# -----------------------------------------------------------------
_tcf_enclave_info = None
_ias = None
_sig_rl_update_time = None
_sig_rl_update_period = 8 * 60 * 60 # in seconds every 8 hours
_epid_group = None
# ----------------------------------------------------------------
def __find_enclave_library(config):
"""
Find enclave library file from the parsed config
"""
enclave_file_name = config.get('enclave_library')
enclave_file_path = TCFHOME + "/" + config.get('enclave_library_path')
logger.info("Enclave Lib: %s", enclave_file_name)
if enclave_file_path:
enclave_file = os.path.join(enclave_file_path, enclave_file_name)
if os.path.exists(enclave_file):
logger.info("Enclave Lib Exists")
return enclave_file
else:
script_directory = os.path.abspath(
os.path.dirname(os.path.realpath(__file__)))
logger.info("Script directory - %s", script_directory)
search_path = [
script_directory,
os.path.abspath(os.path.join(script_directory, '..', 'lib')),
]
for path in search_path:
enclave_file = os.path.join(path, enclave_file_name)
if os.path.exists(enclave_file):
logger.info("Enclave Lib Exits")
return enclave_file
raise IOError("Could not find enclave shared object")
# -----------------------------------------------------------------
def __update_sig_rl():
"""
Update the signature revocation lists for EPID group on IAS server
"""
global _epid_group
global _sig_rl_update_time
global _sig_rl_update_period
if _epid_group is None:
_epid_group = _tcf_enclave_info.get_epid_group()
logger.info("EPID: " + _epid_group)
if not _sig_rl_update_time \
or (time.time() - _sig_rl_update_time) > _sig_rl_update_period:
sig_rl = ""
if (not enclave.is_sgx_simulator()):
sig_rl = _ias.get_signature_revocation_lists(_epid_group)
logger.debug("Received SigRl of {} bytes ".format(len(sig_rl)))
_tcf_enclave_info.set_signature_revocation_list(sig_rl)
_sig_rl_update_time = time.time()
# -----------------------------------------------------------------
def initialize_with_configuration(config):
"""
Create and Initialize a SGX enclave with passed config
"""
global _tcf_enclave_info
global _ias
global logger
enclave._SetLogger(logger)
# Ensure that the required keys are in the configuration
valid_keys = set(['spid', 'ias_url', 'ias_api_key'])
found_keys = set(config.keys())
missing_keys = valid_keys.difference(found_keys)
if missing_keys:
raise \
ValueError(
'TCF enclave config file missing the following keys: '
'{}'.format(
', '.join(sorted(list(missing_keys)))))
# IAS is not initialized in SGX SIM mode
if not _ias and not enclave.is_sgx_simulator():
_ias = \
ias_client.IasClient(
IasServer=config['ias_url'],
ApiKey=config['ias_api_key'],
Spid=config['spid'],
HttpsProxy=config.get('https_proxy', ""))
if not _tcf_enclave_info:
signed_enclave = __find_enclave_library(config)
logger.debug("Attempting to load enclave at: %s", signed_enclave)
_tcf_enclave_info = enclave.tcf_enclave_info(
signed_enclave, config['spid'], int(config['num_of_enclaves']))
logger.info("Basename: %s", get_enclave_basename())
logger.info("MRENCLAVE: %s", get_enclave_measurement())
sig_rl_updated = False
while not sig_rl_updated:
try:
__update_sig_rl()
sig_rl_updated = True
except (SSLError, Timeout, HTTPError) as e:
logger.warning(
"Failed to retrieve initial sig rl from IAS: %s", str(e))
logger.warning("Retrying in 60 sec")
time.sleep(60)
return get_enclave_basename(), get_enclave_measurement()
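# -----------------------------------------------------------------
# Hedged example (not part of the original module): the minimal shape of the
# config dict consumed above. The keys mirror the checks in
# initialize_with_configuration() and __find_enclave_library(); every value
# is a placeholder, not a real credential or path.
_EXAMPLE_ENCLAVE_CONFIG = {
    'spid': '<32-char-hex-spid>',
    'ias_url': '<ias-api-endpoint>',
    'ias_api_key': '<ias-subscription-key>',
    'enclave_library': '<signed-enclave-library>.so',
    'enclave_library_path': '<path-relative-to-TCF_HOME>',
    'num_of_enclaves': '1',
    'https_proxy': '',  # optional, defaults to ""
}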
# -----------------------------------------------------------------
def shutdown():
global _tcf_enclave_info
global _ias
global _sig_rl_update_time
global _epid_group
_tcf_enclave_info = None
_ias = None
_sig_rl_update_time = None
_epid_group = None
# -----------------------------------------------------------------
def get_enclave_measurement():
global _tcf_enclave_info
return _tcf_enclave_info.mr_enclave \
if _tcf_enclave_info is not None else None
# -----------------------------------------------------------------
def get_enclave_basename():
global _tcf_enclave_info
return _tcf_enclave_info.basename \
if _tcf_enclave_info is not None else None
# -----------------------------------------------------------------
def verify_enclave_info(enclave_info, mr_enclave, originator_public_key_hash):
"""
Verifies enclave signup info
- enclave_info is a JSON serialised enclave signup info
along with IAS attestation report
- mr_enclave is enclave measurement value
"""
return enclave.VerifyEnclaveInfo(
enclave_info, mr_enclave, originator_public_key_hash)
# -----------------------------------------------------------------
def create_signup_info(originator_public_key_hash, nonce):
"""
Create enclave signup data
"""
    # Part of what is returned with the signup data is an enclave quote, so
    # we want to update the revocation list first.
__update_sig_rl()
# Now, let the enclave create the signup data
signup_data = enclave.CreateEnclaveData(originator_public_key_hash)
if signup_data is None:
return None
    # We don't really have any reason to call back down into the enclave
    # as we have everything we need now. Other objects, such as the wait
    # timer and certificate, are serialized into JSON down in the C++ code.
#
# Start building up the signup info dictionary we will serialize
signup_info = {
'verifying_key': signup_data['verifying_key'],
'encryption_key': signup_data['encryption_key'],
'proof_data': 'Not present',
'enclave_persistent_id': 'Not present'
}
# If we are not running in the simulator, we are going to go and get
# an attestation verification report for our signup data.
if not enclave.is_sgx_simulator():
logger.debug("posting verification to IAS")
response = _ias.post_verify_attestation(
quote=signup_data['enclave_quote'], nonce=nonce)
logger.debug("posted verification to IAS")
# check verification report
if not _ias.verify_report_fields(
signup_data['enclave_quote'], response['verification_report']):
logger.debug("last error: " + _ias.last_verification_error())
if _ias.last_verification_error() == "GROUP_OUT_OF_DATE":
logger.warning("failure GROUP_OUT_OF_DATE " +
"(update your BIOS/microcode!!!) keep going")
else:
logger.error("invalid report fields")
return None
# ALL checks have passed
logger.info("report fields verified")
# Now put the proof data into the dictionary
signup_info['proof_data'] = \
json.dumps({
'verification_report': response['verification_report'],
'ias_report_signature': response['ias_signature'],
'ias_report_signing_certificate': response['ias_certificate']
})
# Grab the EPID pseudonym and put it in the enclave-persistent ID for
# the signup info
verification_report_dict = json.loads(response['verification_report'])
signup_info['enclave_persistent_id'] = \
verification_report_dict.get('epidPseudonym')
mr_enclave = get_enclave_measurement()
status = verify_enclave_info(
json.dumps(signup_info), mr_enclave, originator_public_key_hash)
if status != 0:
logger.error("Verification of enclave signup info failed")
else:
logger.info("Verification of enclave signup info passed")
# Now we can finally serialize the signup info and create a corresponding
# signup info object. Because we don't want the sealed signup data in the
# serialized version, we set it separately.
signup_info_obj = enclave.deserialize_signup_info(json.dumps(signup_info))
signup_info_obj.sealed_signup_data = signup_data['sealed_enclave_data']
# Now we can return the real object
return signup_info_obj
| 36.616541 | 79 | 0.633778 |
793ec82d159f71b5248c89b78488c74a7da6ecaa | 397 | py | Python | goal_beta1/wsgi.py | sanjeevhalyal/goals | 88216db91e2bde7eef947d3c8df1f72ef557ba43 | [
"MIT"
] | null | null | null | goal_beta1/wsgi.py | sanjeevhalyal/goals | 88216db91e2bde7eef947d3c8df1f72ef557ba43 | [
"MIT"
] | null | null | null | goal_beta1/wsgi.py | sanjeevhalyal/goals | 88216db91e2bde7eef947d3c8df1f72ef557ba43 | [
"MIT"
] | null | null | null | """
WSGI config for goal_beta1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "goal_beta1.settings")
application = get_wsgi_application()
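# Hedged usage note (not part of the original file): any WSGI server can
# serve the module-level ``application``; for example, assuming gunicorn is
# installed:
#
#     gunicorn goal_beta1.wsgi:application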
| 23.352941 | 78 | 0.788413 |
793ec973e7a1c53ee0749ae44e09efa5243c26a4 | 2,459 | py | Python | day16/day16.py | ecly/adventofcode2020 | bd284c5d9e2badbd923ae85cad9f3305d92ee1d4 | [
"MIT"
] | null | null | null | day16/day16.py | ecly/adventofcode2020 | bd284c5d9e2badbd923ae85cad9f3305d92ee1d4 | [
"MIT"
] | null | null | null | day16/day16.py | ecly/adventofcode2020 | bd284c5d9e2badbd923ae85cad9f3305d92ee1d4 | [
"MIT"
] | null | null | null | import re
import math
from itertools import takewhile
from collections import defaultdict, Counter
def part1(rules, nearby):
    ranges = re.findall(r"\d+-\d+", rules)
    ranges = [tuple(map(int, m.split("-"))) for m in ranges]
    error_rate = 0
    valid_tickets = []
    for line in nearby:
        # A ticket is kept for part 2 only if every value matches some range;
        # values matching no range contribute to the part 1 error rate.
        valid = True
        for v in map(int, line.split(",")):
            if not any(lo <= v <= hi for lo, hi in ranges):
                error_rate += v
                valid = False
        if valid:
            valid_tickets.append(line)
    return error_rate, valid_tickets
def deduce_mapping(fields):
fields = fields.copy()
mapping = dict()
while fields:
discard = set()
for field, matches in fields.items():
counter = Counter(matches)
common = counter.most_common()
max_k, max_v = common[0]
candidates = list(takewhile(lambda x: x[1] >= max_v, common))
# if we only have 1 candidate for this field, we assign it
if len(candidates) == 1:
mapping[field] = max_k
discard.add(max_k)
# remove categories and field ids that are assigned
fields = {
k: [v for v in l if v not in discard]
for k, l in fields.items()
if k not in mapping
}
return mapping
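# Hedged mini-check (not part of the original solution) of the elimination
# above: field 0 matches only "row", so it is assigned first; removing "row"
# then leaves "seat" as the sole candidate for field 1.
def _demo_deduce_mapping():
    fields = {0: ["row", "row"], 1: ["row", "seat", "row", "seat"]}
    assert deduce_mapping(fields) == {0: "row", 1: "seat"}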
def part2(rules, mine, nearby):
ranges = dict()
for l in rules.split("\n"):
k, r = l.split(": ")
r1, r2 = r.split(" or ")
r1 = tuple(map(int, r1.split("-")))
r2 = tuple(map(int, r2.split("-")))
ranges[k] = (r1, r2)
fields = defaultdict(list)
for n in nearby:
for i, v in enumerate(map(int, n.split(","))):
            for name, ((l1, h1), (l2, h2)) in ranges.items():
                if l1 <= v <= h1 or l2 <= v <= h2:
                    fields[i].append(name)
mapping = deduce_mapping(fields)
departure_ids = [f for f, name in mapping.items() if name.startswith("departure")]
mine = list(map(int, mine.split(",")))
return math.prod(v for idx, v in enumerate(mine) if idx in departure_ids)
def main():
with open("input") as f:
rules, mine, nearby = f.read().strip().split("\n\n")
mine = mine.split("\n")[1]
nearby = nearby.split("\n")[1:]
error_rate, valid_tickets = part1(rules, nearby)
print(error_rate)
print(part2(rules, mine, valid_tickets))
if __name__ == "__main__":
main()
| 28.929412 | 86 | 0.54209 |
793eca5010dab66c6d4c579003e1a7a8415f2c96 | 27,882 | py | Python | autocalibration/lib/python2.7/site-packages/mpl_toolkits/axes_grid1/colorbar.py | prcalopa/reactable-autocalibration | eb67a5b5ee0e50f1effa773f6f3f934b5fda6fcf | [
"MIT"
] | 5 | 2017-11-15T10:33:42.000Z | 2021-11-16T02:21:31.000Z | mpl_toolkits/axes_grid1/colorbar.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 2 | 2017-10-28T03:30:26.000Z | 2017-10-28T03:31:00.000Z | mpl_toolkits/axes_grid1/colorbar.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 6 | 2017-11-30T00:34:20.000Z | 2021-05-20T02:58:02.000Z | '''
Colorbar toolkit with two classes and a function:
:class:`ColorbarBase`
the base class with full colorbar drawing functionality.
It can be used as-is to make a colorbar for a given colormap;
a mappable object (e.g., image) is not needed.
:class:`Colorbar`
the derived class for use with images or contour plots.
:func:`make_axes`
a function for resizing an axes and adding a second axes
suitable for a colorbar
The :meth:`~matplotlib.figure.Figure.colorbar` method uses :func:`make_axes`
and :class:`Colorbar`; the :func:`~matplotlib.pyplot.colorbar` function
is a thin wrapper over :meth:`~matplotlib.figure.Figure.colorbar`.
'''
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange, zip
import numpy as np
import matplotlib as mpl
import matplotlib.colors as colors
import matplotlib.cm as cm
from matplotlib import docstring
import matplotlib.ticker as ticker
import matplotlib.cbook as cbook
import matplotlib.collections as collections
import matplotlib.contour as contour
from matplotlib.path import Path
from matplotlib.patches import PathPatch
from matplotlib.transforms import Bbox
make_axes_kw_doc = '''
============= ====================================================
Property Description
============= ====================================================
*orientation* vertical or horizontal
*fraction* 0.15; fraction of original axes to use for colorbar
*pad* 0.05 if vertical, 0.15 if horizontal; fraction
of original axes between colorbar and new image axes
*shrink* 1.0; fraction by which to shrink the colorbar
*aspect* 20; ratio of long to short dimensions
============= ====================================================
'''
colormap_kw_doc = '''
=========== ====================================================
Property Description
=========== ====================================================
*extend* [ 'neither' | 'both' | 'min' | 'max' ]
If not 'neither', make pointed end(s) for out-of-
range values. These are set for a given colormap
using the colormap set_under and set_over methods.
*spacing* [ 'uniform' | 'proportional' ]
Uniform spacing gives each discrete color the same
space; proportional makes the space proportional to
the data interval.
*ticks* [ None | list of ticks | Locator object ]
If None, ticks are determined automatically from the
input.
*format* [ None | format string | Formatter object ]
If None, the
:class:`~matplotlib.ticker.ScalarFormatter` is used.
If a format string is given, e.g., '%.3f', that is
used. An alternative
:class:`~matplotlib.ticker.Formatter` object may be
given instead.
*drawedges* [ False | True ] If true, draw lines at color
boundaries.
=========== ====================================================
The following will probably be useful only in the context of
indexed colors (that is, when the mappable has norm=NoNorm()),
or other unusual circumstances.
============ ===================================================
Property Description
============ ===================================================
*boundaries* None or a sequence
*values* None or a sequence which must be of length 1 less
than the sequence of *boundaries*. For each region
delimited by adjacent entries in *boundaries*, the
color mapped to the corresponding value in values
will be used.
============ ===================================================
'''
colorbar_doc = '''
Add a colorbar to a plot.
Function signatures for the :mod:`~matplotlib.pyplot` interface; all
but the first are also method signatures for the
:meth:`~matplotlib.figure.Figure.colorbar` method::
colorbar(**kwargs)
colorbar(mappable, **kwargs)
colorbar(mappable, cax=cax, **kwargs)
colorbar(mappable, ax=ax, **kwargs)
arguments:
*mappable*
the :class:`~matplotlib.image.Image`,
:class:`~matplotlib.contour.ContourSet`, etc. to
which the colorbar applies; this argument is mandatory for the
:meth:`~matplotlib.figure.Figure.colorbar` method but optional for the
:func:`~matplotlib.pyplot.colorbar` function, which sets the
default to the current image.
keyword arguments:
*cax*
None | axes object into which the colorbar will be drawn
*ax*
None | parent axes object from which space for a new
colorbar axes will be stolen
Additional keyword arguments are of two kinds:
axes properties:
%s
colorbar properties:
%s
If *mappable* is a :class:`~matplotlib.contour.ContourSet`, its *extend*
kwarg is included automatically.
Note that the *shrink* kwarg provides a simple way to keep a vertical
colorbar, for example, from being taller than the axes of the mappable
to which the colorbar is attached; but it is a manual method requiring
some trial and error. If the colorbar is too tall (or a horizontal
colorbar is too wide) use a smaller value of *shrink*.
For more precise control, you can manually specify the positions of
the axes objects in which the mappable and the colorbar are drawn. In
this case, do not use any of the axes properties kwargs.
It is known that some vector graphics viewers (svg and pdf) render white gaps
between segments of the colorbar. This is due to bugs in the viewers, not
matplotlib. As a workaround the colorbar can be rendered with overlapping
segments::
cbar = colorbar()
cbar.solids.set_edgecolor("face")
draw()
However, this has negative consequences in other circumstances, particularly
with semi-transparent images (alpha < 1) and colorbar extensions, and it is
therefore not enabled by default (see issue #1188).
returns:
:class:`~matplotlib.colorbar.Colorbar` instance; see also its base class,
:class:`~matplotlib.colorbar.ColorbarBase`. Call the
:meth:`~matplotlib.colorbar.ColorbarBase.set_label` method
to label the colorbar.
The transData of the *cax* is adjusted so that the limits in the
longest axis actually correspond to the limits of the colorbar range. On
the other hand, the shortest axis has data limits of [1,2], whose
unconventional value is to prevent underflow when a log scale is used.
''' % (make_axes_kw_doc, colormap_kw_doc)
docstring.interpd.update(colorbar_doc=colorbar_doc)
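# Hedged usage sketch (not part of the original module): a standalone
# colorbar on a plain axes, driven only by a colormap and a Normalize.
# ``ColorbarBase`` is defined further below; the name is resolved when the
# function is called. The figure geometry is illustrative.
def _demo_standalone_colorbar():
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=(6, 1))
    ax = fig.add_axes([0.05, 0.45, 0.9, 0.3])
    cb = ColorbarBase(ax, cmap=cm.get_cmap(),
                      norm=colors.Normalize(vmin=0., vmax=1.),
                      orientation='horizontal')
    cb.set_label_text('0-1 scale')
    plt.show()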
class CbarAxesLocator(object):
"""
    CbarAxesLocator is an axes_locator for colorbar axes. It adjusts the
    position of the axes to make room for the extended ends, i.e., the
    extended ends are located outside the axes area.
"""
def __init__(self, locator=None, extend="neither", orientation="vertical"):
"""
        *locator* : the bbox returned from the locator is used as the
        initial axes location. If None, axes.bbox is used.
*extend* : same as in ColorbarBase
*orientation* : same as in ColorbarBase
"""
self._locator = locator
self.extesion_fraction = 0.05
self.extend = extend
self.orientation = orientation
def get_original_position(self, axes, renderer):
"""
get the original position of the axes.
"""
if self._locator is None:
bbox = axes.get_position(original=True)
else:
bbox = self._locator(axes, renderer)
return bbox
def get_end_vertices(self):
"""
        Return a tuple of two vertex lists for the colorbar extended ends.
        The first list is for the minimum end, and the second is for
        the maximum end.
"""
        # Note that concatenating the two vertex lists produces the
        # vertices for the frame.
extesion_fraction = self.extesion_fraction
corx = extesion_fraction*2.
cory = 1./(1. - corx)
x1, y1, w, h = 0, 0, 1, 1
x2, y2 = x1 + w, y1 + h
dw, dh = w*extesion_fraction, h*extesion_fraction*cory
if self.extend in ["min", "both"]:
bottom = [(x1, y1),
(x1+w/2., y1-dh),
(x2, y1)]
else:
bottom = [(x1, y1),
(x2, y1)]
if self.extend in ["max", "both"]:
top = [(x2, y2),
(x1+w/2., y2+dh),
(x1, y2)]
else:
top = [(x2, y2),
(x1, y2)]
if self.orientation == "horizontal":
bottom = [(y,x) for (x,y) in bottom]
top = [(y,x) for (x,y) in top]
return bottom, top
def get_path_patch(self):
"""
get the path for axes patch
"""
end1, end2 = self.get_end_vertices()
verts = [] + end1 + end2 + end1[:1]
return Path(verts)
def get_path_ends(self):
"""
get the paths for extended ends
"""
end1, end2 = self.get_end_vertices()
return Path(end1), Path(end2)
def __call__(self, axes, renderer):
"""
Return the adjusted position of the axes
"""
bbox0 = self.get_original_position(axes, renderer)
bbox = bbox0
x1, y1, w, h = bbox.bounds
        extension_fraction = self.extension_fraction
        dw, dh = w*extension_fraction, h*extension_fraction
if self.extend in ["min", "both"]:
if self.orientation == "horizontal":
x1 = x1 + dw
else:
y1 = y1+dh
if self.extend in ["max", "both"]:
if self.orientation == "horizontal":
w = w-2*dw
else:
h = h-2*dh
return Bbox.from_bounds(x1, y1, w, h)
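# A small sketch of how the locator is meant to be wired up (``cax`` is a
# hypothetical colorbar axes; ColorbarBase._config_axes below does this
# automatically):
#
#     locator = CbarAxesLocator(cax.get_axes_locator(),
#                               extend="both", orientation="vertical")
#     cax.set_axes_locator(locator)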
class ColorbarBase(cm.ScalarMappable):
'''
Draw a colorbar in an existing axes.
This is a base class for the :class:`Colorbar` class, which is the
basis for the :func:`~matplotlib.pyplot.colorbar` method and pylab
function.
It is also useful by itself for showing a colormap. If the *cmap*
kwarg is given but *boundaries* and *values* are left as None,
then the colormap will be displayed on a 0-1 scale. To show the
under- and over-value colors, specify the *norm* as::
colors.Normalize(clip=False)
To show the colors versus index instead of on the 0-1 scale,
use::
norm=colors.NoNorm.
Useful attributes:
:attr:`ax`
the Axes instance in which the colorbar is drawn
:attr:`lines`
a LineCollection if lines were drawn, otherwise None
:attr:`dividers`
a LineCollection if *drawedges* is True, otherwise None
Useful public methods are :meth:`set_label` and :meth:`add_lines`.
'''
def __init__(self, ax, cmap=None,
norm=None,
alpha=1.0,
values=None,
boundaries=None,
orientation='vertical',
extend='neither',
spacing='uniform', # uniform or proportional
ticks=None,
format=None,
drawedges=False,
filled=True,
):
self.ax = ax
if cmap is None: cmap = cm.get_cmap()
if norm is None: norm = colors.Normalize()
self.alpha = alpha
cm.ScalarMappable.__init__(self, cmap=cmap, norm=norm)
self.values = values
self.boundaries = boundaries
self.extend = extend
self.spacing = spacing
self.orientation = orientation
self.drawedges = drawedges
self.filled = filled
# artists
self.solids = None
self.lines = None
self.dividers = None
self.extension_patch1 = None
self.extension_patch2 = None
if orientation == "vertical":
self.cbar_axis = self.ax.yaxis
else:
self.cbar_axis = self.ax.xaxis
if format is None:
if isinstance(self.norm, colors.LogNorm):
                # change both axes for a proper aspect
self.ax.set_xscale("log")
self.ax.set_yscale("log")
self.cbar_axis.set_minor_locator(ticker.NullLocator())
formatter = ticker.LogFormatter()
else:
formatter = None
elif isinstance(format, six.string_types):
formatter = ticker.FormatStrFormatter(format)
else:
formatter = format # Assume it is a Formatter
if formatter is None:
formatter = self.cbar_axis.get_major_formatter()
else:
self.cbar_axis.set_major_formatter(formatter)
if cbook.iterable(ticks):
self.cbar_axis.set_ticks(ticks)
elif ticks is not None:
self.cbar_axis.set_major_locator(ticks)
else:
self._select_locator(formatter)
self._config_axes()
self.update_artists()
self.set_label_text('')
def _get_colorbar_limits(self):
"""
        Initial limits for the colorbar range. The returned min, max values
        will be used to create the colorbar solids and the extended ends.
"""
if self.boundaries is not None:
C = self.boundaries
if self.extend in ["min", "both"]:
C = C[1:]
if self.extend in ["max", "both"]:
C = C[:-1]
return min(C), max(C)
else:
return self.get_clim()
def _config_axes(self):
'''
Adjust the properties of the axes to be adequate for colorbar display.
'''
ax = self.ax
axes_locator = CbarAxesLocator(ax.get_axes_locator(),
extend=self.extend,
orientation=self.orientation)
ax.set_axes_locator(axes_locator)
        # override get_data_ratio so that the aspect setting works.
def _f():
return 1.
ax.get_data_ratio = _f
ax.get_data_ratio_log = _f
ax.set_frame_on(True)
ax.set_navigate(False)
self.ax.set_autoscalex_on(False)
self.ax.set_autoscaley_on(False)
if self.orientation == 'horizontal':
ax.xaxis.set_label_position('bottom')
ax.set_yticks([])
else:
ax.set_xticks([])
ax.yaxis.set_label_position('right')
ax.yaxis.set_ticks_position('right')
def update_artists(self):
"""
        Update the colorbar's associated artists, *filled* and
        *ends*. Note that *lines* are not updated. This needs to be
        called whenever the clim of the associated image changes.
"""
self._process_values()
self._add_ends()
X, Y = self._mesh()
if self.filled:
C = self._values[:,np.newaxis]
self._add_solids(X, Y, C)
ax = self.ax
vmin, vmax = self._get_colorbar_limits()
if self.orientation == 'horizontal':
ax.set_ylim(1, 2)
ax.set_xlim(vmin, vmax)
else:
ax.set_xlim(1, 2)
ax.set_ylim(vmin, vmax)
def _add_ends(self):
"""
Create patches from extended ends and add them to the axes.
"""
del self.extension_patch1
del self.extension_patch2
path1, path2 = self.ax.get_axes_locator().get_path_ends()
fc=mpl.rcParams['axes.facecolor']
ec=mpl.rcParams['axes.edgecolor']
linewidths=0.5*mpl.rcParams['axes.linewidth']
self.extension_patch1 = PathPatch(path1,
fc=fc, ec=ec, lw=linewidths,
zorder=2.,
transform=self.ax.transAxes,
clip_on=False)
self.extension_patch2 = PathPatch(path2,
fc=fc, ec=ec, lw=linewidths,
zorder=2.,
transform=self.ax.transAxes,
clip_on=False)
self.ax.add_artist(self.extension_patch1)
self.ax.add_artist(self.extension_patch2)
def _set_label_text(self):
"""
set label.
"""
self.cbar_axis.set_label_text(self._label, **self._labelkw)
def set_label_text(self, label, **kw):
'''
Label the long axis of the colorbar
'''
self._label = label
self._labelkw = kw
self._set_label_text()
def _edges(self, X, Y):
'''
Return the separator line segments; helper for _add_solids.
'''
N = X.shape[0]
# Using the non-array form of these line segments is much
# simpler than making them into arrays.
if self.orientation == 'vertical':
return [list(zip(X[i], Y[i])) for i in xrange(1, N-1)]
else:
return [list(zip(Y[i], X[i])) for i in xrange(1, N-1)]
def _add_solids(self, X, Y, C):
'''
Draw the colors using :meth:`~matplotlib.axes.Axes.pcolormesh`;
optionally add separators.
'''
## Change to pcolorfast after fixing bugs in some backends...
if self.extend in ["min", "both"]:
cc = self.to_rgba([C[0][0]])
self.extension_patch1.set_fc(cc[0])
X, Y, C = X[1:], Y[1:], C[1:]
if self.extend in ["max", "both"]:
cc = self.to_rgba([C[-1][0]])
self.extension_patch2.set_fc(cc[0])
X, Y, C = X[:-1], Y[:-1], C[:-1]
if self.orientation == 'vertical':
args = (X, Y, C)
else:
args = (np.transpose(Y), np.transpose(X), np.transpose(C))
kw = {'cmap':self.cmap, 'norm':self.norm,
'shading':'flat', 'alpha':self.alpha,
}
del self.solids
del self.dividers
col = self.ax.pcolormesh(*args, **kw)
self.solids = col
if self.drawedges:
self.dividers = collections.LineCollection(self._edges(X,Y),
colors=(mpl.rcParams['axes.edgecolor'],),
linewidths=(0.5*mpl.rcParams['axes.linewidth'],),
)
self.ax.add_collection(self.dividers)
else:
self.dividers = None
def add_lines(self, levels, colors, linewidths):
'''
Draw lines on the colorbar. It deletes preexisting lines.
'''
del self.lines
N = len(levels)
x = np.array([1.0, 2.0])
X, Y = np.meshgrid(x,levels)
if self.orientation == 'vertical':
xy = [list(zip(X[i], Y[i])) for i in xrange(N)]
else:
xy = [list(zip(Y[i], X[i])) for i in xrange(N)]
col = collections.LineCollection(xy, linewidths=linewidths,
)
self.lines = col
col.set_color(colors)
self.ax.add_collection(col)
def _select_locator(self, formatter):
'''
select a suitable locator
'''
if self.boundaries is None:
if isinstance(self.norm, colors.NoNorm):
nv = len(self._values)
base = 1 + int(nv/10)
locator = ticker.IndexLocator(base=base, offset=0)
elif isinstance(self.norm, colors.BoundaryNorm):
b = self.norm.boundaries
locator = ticker.FixedLocator(b, nbins=10)
elif isinstance(self.norm, colors.LogNorm):
locator = ticker.LogLocator()
else:
locator = ticker.MaxNLocator(nbins=5)
else:
b = self._boundaries[self._inside]
locator = ticker.FixedLocator(b) #, nbins=10)
self.cbar_axis.set_major_locator(locator)
def _process_values(self, b=None):
'''
Set the :attr:`_boundaries` and :attr:`_values` attributes
based on the input boundaries and values. Input boundaries
can be *self.boundaries* or the argument *b*.
'''
if b is None:
b = self.boundaries
if b is not None:
self._boundaries = np.asarray(b, dtype=float)
if self.values is None:
self._values = 0.5*(self._boundaries[:-1]
+ self._boundaries[1:])
if isinstance(self.norm, colors.NoNorm):
self._values = (self._values + 0.00001).astype(np.int16)
return
self._values = np.array(self.values)
return
if self.values is not None:
self._values = np.array(self.values)
if self.boundaries is None:
b = np.zeros(len(self.values)+1, 'd')
b[1:-1] = 0.5*(self._values[:-1] - self._values[1:])
b[0] = 2.0*b[1] - b[2]
b[-1] = 2.0*b[-2] - b[-3]
self._boundaries = b
return
self._boundaries = np.array(self.boundaries)
return
# Neither boundaries nor values are specified;
# make reasonable ones based on cmap and norm.
if isinstance(self.norm, colors.NoNorm):
b = self._uniform_y(self.cmap.N+1) * self.cmap.N - 0.5
v = np.zeros((len(b)-1,), dtype=np.int16)
v = np.arange(self.cmap.N, dtype=np.int16)
self._boundaries = b
self._values = v
return
elif isinstance(self.norm, colors.BoundaryNorm):
b = np.array(self.norm.boundaries)
v = np.zeros((len(b)-1,), dtype=float)
bi = self.norm.boundaries
v = 0.5*(bi[:-1] + bi[1:])
self._boundaries = b
self._values = v
return
else:
b = self._uniform_y(self.cmap.N+1)
self._process_values(b)
def _uniform_y(self, N):
'''
Return colorbar data coordinates for *N* uniformly
spaced boundaries.
'''
vmin, vmax = self._get_colorbar_limits()
if isinstance(self.norm, colors.LogNorm):
y = np.logspace(np.log10(vmin), np.log10(vmax), N)
else:
y = np.linspace(vmin, vmax, N)
return y
def _mesh(self):
'''
Return X,Y, the coordinate arrays for the colorbar pcolormesh.
These are suitable for a vertical colorbar; swapping and
transposition for a horizontal colorbar are done outside
this function.
'''
x = np.array([1.0, 2.0])
if self.spacing == 'uniform':
y = self._uniform_y(len(self._boundaries))
else:
y = self._boundaries
self._y = y
X, Y = np.meshgrid(x,y)
return X, Y
def set_alpha(self, alpha):
"""
set alpha value.
"""
self.alpha = alpha
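# ColorbarBase can also be used by itself to display a colormap; a minimal
# sketch (assumes pyplot is importable; the cmap and norm choices are
# arbitrary):
#
#     import matplotlib.pyplot as plt
#     from matplotlib import cm, colors
#     fig, cax = plt.subplots(figsize=(1, 4))
#     cb = ColorbarBase(cax, cmap=cm.get_cmap(),
#                       norm=colors.Normalize(vmin=0, vmax=1))
#     cb.set_label_text("value")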
class Colorbar(ColorbarBase):
def __init__(self, ax, mappable, **kw):
mappable.autoscale_None() # Ensure mappable.norm.vmin, vmax
# are set when colorbar is called,
# even if mappable.draw has not yet
# been called. This will not change
# vmin, vmax if they are already set.
self.mappable = mappable
kw['cmap'] = mappable.cmap
kw['norm'] = mappable.norm
kw['alpha'] = mappable.get_alpha()
if isinstance(mappable, contour.ContourSet):
CS = mappable
kw['boundaries'] = CS._levels
kw['values'] = CS.cvalues
kw['extend'] = CS.extend
#kw['ticks'] = CS._levels
kw.setdefault('ticks', ticker.FixedLocator(CS.levels, nbins=10))
kw['filled'] = CS.filled
ColorbarBase.__init__(self, ax, **kw)
if not CS.filled:
self.add_lines(CS)
else:
ColorbarBase.__init__(self, ax, **kw)
def add_lines(self, CS):
'''
Add the lines from a non-filled
:class:`~matplotlib.contour.ContourSet` to the colorbar.
'''
if not isinstance(CS, contour.ContourSet) or CS.filled:
raise ValueError('add_lines is only for a ContourSet of lines')
tcolors = [c[0] for c in CS.tcolors]
tlinewidths = [t[0] for t in CS.tlinewidths]
# The following was an attempt to get the colorbar lines
# to follow subsequent changes in the contour lines,
# but more work is needed: specifically, a careful
# look at event sequences, and at how
# to make one object track another automatically.
#tcolors = [col.get_colors()[0] for col in CS.collections]
        #tlinewidths = [col.get_linewidth()[0] for col in CS.collections]
#print 'tlinewidths:', tlinewidths
ColorbarBase.add_lines(self, CS.levels, tcolors, tlinewidths)
def update_bruteforce(self, mappable):
"""
Update the colorbar artists to reflect the change of the
associated mappable.
"""
self.update_artists()
if isinstance(mappable, contour.ContourSet):
if not mappable.filled:
self.add_lines(mappable)
@docstring.Substitution(make_axes_kw_doc)
def make_axes(parent, **kw):
'''
Resize and reposition a parent axes, and return a child
axes suitable for a colorbar::
cax, kw = make_axes(parent, **kw)
Keyword arguments may include the following (with defaults):
*orientation*
'vertical' or 'horizontal'
%s
All but the first of these are stripped from the input kw set.
Returns (cax, kw), the child axes and the reduced kw dictionary.
'''
orientation = kw.setdefault('orientation', 'vertical')
fraction = kw.pop('fraction', 0.15)
shrink = kw.pop('shrink', 1.0)
aspect = kw.pop('aspect', 20)
#pb = transforms.PBox(parent.get_position())
pb = parent.get_position(original=True).frozen()
if orientation == 'vertical':
pad = kw.pop('pad', 0.05)
x1 = 1.0-fraction
pb1, pbx, pbcb = pb.splitx(x1-pad, x1)
pbcb = pbcb.shrunk(1.0, shrink).anchored('C', pbcb)
anchor = (0.0, 0.5)
panchor = (1.0, 0.5)
else:
pad = kw.pop('pad', 0.15)
pbcb, pbx, pb1 = pb.splity(fraction, fraction+pad)
pbcb = pbcb.shrunk(shrink, 1.0).anchored('C', pbcb)
aspect = 1.0/aspect
anchor = (0.5, 1.0)
panchor = (0.5, 0.0)
parent.set_position(pb1)
parent.set_anchor(panchor)
fig = parent.get_figure()
cax = fig.add_axes(pbcb)
cax.set_aspect(aspect, anchor=anchor, adjustable='box')
return cax, kw
def colorbar(mappable, cax=None, ax=None, **kw):
"""
Create a colorbar for a ScalarMappable instance.
Documentation for the pylab thin wrapper:
%(colorbar_doc)s
"""
import matplotlib.pyplot as plt
if ax is None:
ax = plt.gca()
if cax is None:
cax, kw = make_axes(ax, **kw)
cax._hold = True
cb = Colorbar(cax, mappable, **kw)
def on_changed(m):
cb.set_cmap(m.get_cmap())
cb.set_clim(m.get_clim())
cb.update_bruteforce(m)
cbid = mappable.callbacksSM.connect('changed', on_changed)
mappable.colorbar = cb
ax.figure.sca(ax)
return cb
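# End-to-end sketch of the helpers above (not part of the original file;
# the image data is dummy):
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#     fig, ax = plt.subplots()
#     im = ax.imshow(np.arange(100).reshape(10, 10))
#     cb = colorbar(im, ax=ax)
#     cb.set_label_text("intensity")
#     plt.show()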
| 33.391617 | 80 | 0.559393 |
793eccbba8d5ccb79a4f8224f1fdb7789a9c8faf | 2,934 | py | Python | lab_3/code/table.py | Winterpuma/bmstu_experiment-planning | 8d0ef45432c4d058a210d896ec3342b6b8b4182d | [
"MIT"
] | 1 | 2021-12-07T09:48:06.000Z | 2021-12-07T09:48:06.000Z | lab_3/code/table.py | Winterpuma/bmstu_experiment-planning | 8d0ef45432c4d058a210d896ec3342b6b8b4182d | [
"MIT"
] | null | null | null | lab_3/code/table.py | Winterpuma/bmstu_experiment-planning | 8d0ef45432c4d058a210d896ec3342b6b8b4182d | [
"MIT"
] | null | null | null | from os import startfile
from openpyxl import Workbook, load_workbook
from openpyxl.styles import PatternFill, Font
names_arr_full = ['x0', 'x1', 'x2', 'x3', 'x4',
'x5', 'x6', 'x7', 'x8',
'x1x2', 'x1x3', 'x1x4', 'x1x5', 'x1x6', 'x1x7', 'x1x8',
'x2x3', 'x2x4', 'x2x5', 'x2x6', 'x2x7', 'x2x8',
'x3x4', 'x3x5', 'x3x6', 'x3x7', 'x3x8',
'x4x5', 'x4x6', 'x4x7', 'x4x8',
'x5x6', 'x5x7', 'x5x8',
'x6x7', 'x6x8',
'x7x8',
'Y', 'Yл', 'Yнл', '|Y - Yл|', '|Y - Yнл|'
]
mainFont = Font(name='Calibri', size=11, bold=True, color='FF000000')
secondaryFont = Font(name='Calibri', size=11, color='FF000000')
headerFill = PatternFill(start_color='B1A0C7', end_color='B1A0C7', fill_type='solid')
greyFill = PatternFill(start_color='D9D9D9', end_color='D9D9D9', fill_type='solid')
purpleFill = PatternFill(start_color='CCC0DA', end_color='CCC0DA', fill_type='solid')
lightgreyFill = PatternFill(start_color='F2F2F2', end_color='F2F2F2', fill_type='solid')
lightpurpleFill = PatternFill(start_color='E4DFEC', end_color='E4DFEC', fill_type='solid')
# Column positions in the spreadsheet
START_POS = 'A'
MIDDLE_LEFT_POS = 'AK'
MIDDLE_RIGHT_POS = 'AL'
END_POS = 'AP'
class ExcelTable():
filename = 'table.xlsx'
    error_msg = 'Error: failed to write data.\nNo access to file %s. Please close it.' % filename
def __init__(self, filename):
self.filename = filename
def create(self, table):
wb_write = Workbook()
ws = wb_write.active
ws.append(names_arr_full)
rows_count = len(table)
for i in range(rows_count):
table_len = len(table[i])
string = []
for j in range(table_len + 1):
if j == 0:
continue
elif j < table_len - 4:
string.append(table[i][j - 1])
else:
string.append('%.4f' % table[i][j - 1])
ws.append(string)
self.set_fill_and_font_rows(ws['%s1:%s1' % (START_POS, END_POS)], mainFont, headerFill)
self.set_fill_and_font_rows(ws['%s2:%s%d' % (START_POS, MIDDLE_LEFT_POS, rows_count + 1)], secondaryFont, greyFill)
self.set_fill_and_font_rows(ws['%s2:%s%d' % (MIDDLE_RIGHT_POS, END_POS, rows_count + 1)], secondaryFont, purpleFill)
try:
wb_write.save(self.filename)
except PermissionError:
print(self.error_msg)
def add_one_row(self, row):
wb_write = load_workbook(self.filename)
ws = wb_write.active
row_len = len(row)
for i in range(row_len):
if i > row_len - 6:
row[i] = '%.4f' % row[i]
ws.append(row)
max_row = ws.max_row
self.set_fill_and_font_rows(ws['A%d:AK%d' % (max_row, max_row)], secondaryFont, lightgreyFill)
self.set_fill_and_font_rows(ws['AL%d:AP%d' % (max_row, max_row)], secondaryFont, lightpurpleFill)
try:
wb_write.save(self.filename)
except PermissionError:
print(self.error_msg)
def open(self):
startfile(self.filename)
    def set_fill_and_font_rows(self, rows, font, fill):
        for row in rows:
for el in row:
el.fill = fill
el.font = font
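# Usage sketch (hypothetical file name and dummy rows; assumes openpyxl is
# installed and each row has one value per column in names_arr_full):
#
#     table = [[0.0] * len(names_arr_full) for _ in range(2)]
#     xls = ExcelTable('table.xlsx')
#     xls.create(table)
#     xls.add_one_row([0.0] * len(names_arr_full))
#     xls.open()  # Windows only: uses os.startfile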
| 29.938776 | 118 | 0.669734 |
793ecdd02d850ce431d1595a800478a5903dbd97 | 103 | py | Python | localusers/admin.py | Duskhorizon/discoplaytogether | e74a11b0f65d14db6f15d1bb0536411dd546eda6 | [
"MIT"
] | null | null | null | localusers/admin.py | Duskhorizon/discoplaytogether | e74a11b0f65d14db6f15d1bb0536411dd546eda6 | [
"MIT"
] | null | null | null | localusers/admin.py | Duskhorizon/discoplaytogether | e74a11b0f65d14db6f15d1bb0536411dd546eda6 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import DiscoServer
admin.site.register(DiscoServer)
| 20.6 | 33 | 0.805825 |
793ecef6e45e162b225a1aeb8ae3ba006a3d7ad0 | 12,967 | py | Python | celery/app/amqp.py | aleszoulek/celery | a179038fec68808d50c0a1f42aa26d315a3817ad | [
"BSD-3-Clause"
] | 2 | 2021-04-30T02:01:12.000Z | 2022-02-18T05:30:28.000Z | celery/app/amqp.py | aleszoulek/celery | a179038fec68808d50c0a1f42aa26d315a3817ad | [
"BSD-3-Clause"
] | null | null | null | celery/app/amqp.py | aleszoulek/celery | a179038fec68808d50c0a1f42aa26d315a3817ad | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
celery.app.amqp
===============
AMQ related functionality.
:copyright: (c) 2009 - 2010 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime, timedelta
from kombu import BrokerConnection, Exchange
from kombu.connection import Resource
from kombu import compat as messaging
from celery import routes as _routes
from celery import signals
from celery.utils import gen_unique_id, textindent, cached_property
from celery.utils import promise, maybe_promise
from celery.utils.compat import UserDict
#: List of known options to a Kombu producers send method.
#: Used to extract the message related options out of any `dict`.
MSG_OPTIONS = ("mandatory", "priority", "immediate", "routing_key",
"serializer", "delivery_mode", "compression")
#: Human readable queue declaration.
QUEUE_FORMAT = """
. %(name)s exchange:%(exchange)s (%(exchange_type)s) \
binding:%(binding_key)s
"""
#: Set of exchange names that have already been declared.
_exchanges_declared = set()
#: Set of queue names that have already been declared.
_queues_declared = set()
def extract_msg_options(options, keep=MSG_OPTIONS):
"""Extracts known options to `basic_publish` from a dict,
and returns a new dict."""
return dict((name, options.get(name)) for name in keep)
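# For example (values are hypothetical):
#
#     extract_msg_options({"routing_key": "celery", "foo": 1})
#     # -> {"mandatory": None, "priority": None, "immediate": None,
#     #     "routing_key": "celery", "serializer": None,
#     #     "delivery_mode": None, "compression": None}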
class Queues(UserDict):
"""Queue name⇒ declaration mapping.
Celery will consult this mapping to find the options
for any queue by name.
:param queues: Initial mapping.
"""
def __init__(self, queues):
self.data = {}
for queue_name, options in (queues or {}).items():
self.add(queue_name, **options)
def add(self, queue, exchange=None, routing_key=None,
exchange_type="direct", **options):
"""Add new queue.
:param queue: Name of the queue.
:keyword exchange: Name of the exchange.
:keyword routing_key: Binding key.
:keyword exchange_type: Type of exchange.
:keyword \*\*options: Additional declaration options.
"""
q = self[queue] = self.options(exchange, routing_key,
exchange_type, **options)
return q
def options(self, exchange, routing_key,
exchange_type="direct", **options):
"""Creates new option mapping for queue, with required
keys present."""
return dict(options, routing_key=routing_key,
binding_key=routing_key,
exchange=exchange,
exchange_type=exchange_type)
def format(self, indent=0, indent_first=True):
"""Format routing table into string for log dumps."""
info = [QUEUE_FORMAT.strip() % dict(
name=(name + ":").ljust(12), **config)
for name, config in self.items()]
if indent_first:
return textindent("\n".join(info), indent)
return info[0] + "\n" + textindent("\n".join(info[1:]), indent)
def select_subset(self, wanted, create_missing=True):
"""Select subset of the currently defined queues.
Does not return anything: queues not in `wanted` will
be discarded in-place.
:param wanted: List of wanted queue names.
:keyword create_missing: By default any unknown queues will be
added automatically, but if disabled
the occurrence of unknown queues
in `wanted` will raise :exc:`KeyError`.
"""
acc = {}
for queue in wanted:
try:
options = self[queue]
except KeyError:
if not create_missing:
raise
options = self.options(queue, queue)
acc[queue] = options
self.data.clear()
self.data.update(acc)
@classmethod
def with_defaults(cls, queues, default_exchange, default_exchange_type):
"""Alternate constructor that adds default exchange and
exchange type information to queues that does not have any."""
for opts in queues.values():
opts.setdefault("exchange", default_exchange),
opts.setdefault("exchange_type", default_exchange_type)
opts.setdefault("binding_key", default_exchange)
opts.setdefault("routing_key", opts.get("binding_key"))
return cls(queues)
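# A small sketch of building a Queues mapping by hand (queue and exchange
# names here are made up):
#
#     queues = Queues({"default": {"exchange": "default",
#                                  "routing_key": "default"}})
#     queues.add("images", exchange="media", routing_key="image.compress")
#     print(queues.format())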
class TaskPublisher(messaging.Publisher):
auto_declare = True
retry = False
retry_policy = None
def __init__(self, *args, **kwargs):
self.app = kwargs.pop("app")
self.retry = kwargs.pop("retry", self.retry)
self.retry_policy = kwargs.pop("retry_policy",
self.retry_policy or {})
super(TaskPublisher, self).__init__(*args, **kwargs)
def declare(self):
if self.exchange.name and \
self.exchange.name not in _exchanges_declared:
super(TaskPublisher, self).declare()
_exchanges_declared.add(self.exchange.name)
def _declare_queue(self, name, retry=False, retry_policy={}):
options = self.app.queues[name]
queue = messaging.entry_to_queue(name, **options)(self.channel)
if retry:
self.connection.ensure(queue, queue.declare, **retry_policy)()
else:
queue.declare()
return queue
def _declare_exchange(self, name, type, retry=False, retry_policy={}):
ex = Exchange(name, type=type, durable=self.durable,
auto_delete=self.auto_delete)(self.channel)
if retry:
return self.connection.ensure(ex, ex.declare, **retry_policy)
return ex.declare()
def delay_task(self, task_name, task_args=None, task_kwargs=None,
countdown=None, eta=None, task_id=None, taskset_id=None,
expires=None, exchange=None, exchange_type=None,
event_dispatcher=None, retry=None, retry_policy=None,
queue=None, now=None, retries=0, **kwargs):
"""Send task message."""
connection = self.connection
_retry_policy = self.retry_policy
if retry_policy: # merge default and custom policy
_retry_policy = dict(_retry_policy, **retry_policy)
# declare entities
if queue and queue not in _queues_declared:
entity = self._declare_queue(queue, retry, _retry_policy)
_exchanges_declared.add(entity.exchange.name)
_queues_declared.add(entity.name)
if exchange and exchange not in _exchanges_declared:
self._declare_exchange(exchange,
exchange_type or self.exchange_type, retry, _retry_policy)
_exchanges_declared.add(exchange)
task_id = task_id or gen_unique_id()
task_args = task_args or []
task_kwargs = task_kwargs or {}
if not isinstance(task_args, (list, tuple)):
raise ValueError("task args must be a list or tuple")
if not isinstance(task_kwargs, dict):
raise ValueError("task kwargs must be a dictionary")
if countdown: # Convert countdown to ETA.
now = now or datetime.now()
eta = now + timedelta(seconds=countdown)
if isinstance(expires, int):
now = now or datetime.now()
expires = now + timedelta(seconds=expires)
eta = eta and eta.isoformat()
expires = expires and expires.isoformat()
body = {"task": task_name,
"id": task_id,
"args": task_args or [],
"kwargs": task_kwargs or {},
"retries": retries or 0,
"eta": eta,
"expires": expires}
if taskset_id:
body["taskset"] = taskset_id
send = self.send
if retry is None and self.retry or retry:
send = connection.ensure(self, self.send, **_retry_policy)
send(body, exchange=exchange, **extract_msg_options(kwargs))
signals.task_sent.send(sender=task_name, **body)
if event_dispatcher:
event_dispatcher.send("task-sent", uuid=task_id,
name=task_name,
args=repr(task_args),
kwargs=repr(task_kwargs),
retries=retries,
eta=eta,
expires=expires)
return task_id
def __exit__(self, *exc_info):
try:
self.release()
except AttributeError:
self.close()
class PublisherPool(Resource):
def __init__(self, limit=None, app=None):
self.app = app
self.connections = self.app.broker_connection().Pool(limit=limit)
super(PublisherPool, self).__init__(limit=limit)
def create_publisher(self):
return self.app.amqp.TaskPublisher(self.connections.acquire(),
auto_declare=False)
def new(self):
return promise(self.create_publisher)
def setup(self):
if self.limit:
for _ in xrange(self.limit):
self._resource.put_nowait(self.new())
def prepare(self, publisher):
return maybe_promise(publisher)
class AMQP(object):
BrokerConnection = BrokerConnection
Publisher = messaging.Publisher
Consumer = messaging.Consumer
ConsumerSet = messaging.ConsumerSet
#: Cached and prepared routing table.
_rtable = None
def __init__(self, app):
self.app = app
def flush_routes(self):
self._rtable = _routes.prepare(self.app.conf.CELERY_ROUTES)
def Queues(self, queues):
"""Create new :class:`Queues` instance, using queue defaults
from the current configuration."""
conf = self.app.conf
if not queues:
queues = {conf.CELERY_DEFAULT_QUEUE: {
"exchange": conf.CELERY_DEFAULT_EXCHANGE,
"exchange_type": conf.CELERY_DEFAULT_EXCHANGE_TYPE,
"binding_key": conf.CELERY_DEFAULT_ROUTING_KEY}}
return Queues.with_defaults(queues, conf.CELERY_DEFAULT_EXCHANGE,
conf.CELERY_DEFAULT_EXCHANGE_TYPE)
def Router(self, queues=None, create_missing=None):
"""Returns the current task router."""
return _routes.Router(self.routes, queues or self.queues,
self.app.either("CELERY_CREATE_MISSING_QUEUES",
create_missing), app=self.app)
def TaskConsumer(self, *args, **kwargs):
"""Returns consumer for a single task queue."""
default_queue_name, default_queue = self.get_default_queue()
defaults = dict({"queue": default_queue_name}, **default_queue)
defaults["routing_key"] = defaults.pop("binding_key", None)
return self.Consumer(*args,
**self.app.merge(defaults, kwargs))
def TaskPublisher(self, *args, **kwargs):
"""Returns publisher used to send tasks.
You should use `app.send_task` instead.
"""
conf = self.app.conf
_, default_queue = self.get_default_queue()
defaults = {"exchange": default_queue["exchange"],
"exchange_type": default_queue["exchange_type"],
"routing_key": conf.CELERY_DEFAULT_ROUTING_KEY,
"serializer": conf.CELERY_TASK_SERIALIZER,
"retry": conf.CELERY_TASK_PUBLISH_RETRY,
"retry_policy": conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
"app": self}
return TaskPublisher(*args, **self.app.merge(defaults, kwargs))
def PublisherPool(self, limit=None):
return PublisherPool(limit=limit, app=self.app)
def get_task_consumer(self, connection, queues=None, **kwargs):
"""Return consumer configured to consume from all known task
queues."""
return self.ConsumerSet(connection, from_dict=queues or self.queues,
**kwargs)
def get_default_queue(self):
"""Returns `(queue_name, queue_options)` tuple for the queue
configured to be default (:setting:`CELERY_DEFAULT_QUEUE`)."""
q = self.app.conf.CELERY_DEFAULT_QUEUE
return q, self.queues[q]
@cached_property
def queues(self):
"""Queue name⇒ declaration mapping."""
return self.Queues(self.app.conf.CELERY_QUEUES)
@queues.setter
def queues(self, value):
return self.Queues(value)
@property
def routes(self):
if self._rtable is None:
self.flush_routes()
return self._rtable
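# Rough usage sketch (assumes a configured Celery ``app``; the task name
# and arguments are placeholders):
#
#     publisher = app.amqp.TaskPublisher(app.broker_connection())
#     task_id = publisher.delay_task("tasks.add", task_args=(2, 2),
#                                    countdown=10)
#     publisher.close()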
| 37.476879 | 78 | 0.599136 |
793ecfa6bf3510646fbfecd5ebd90c4bf2311651 | 1,246 | py | Python | mwaa/mwaa-cdk/app.py | 094459/time-series-and-data-lakes | 75540661764b8bca91debf625278985ceba7b5ca | [
"MIT"
] | 9 | 2021-12-03T17:51:42.000Z | 2022-03-17T08:45:05.000Z | mwaa/mwaa-cdk/app.py | 094459/time-series-and-data-lakes | 75540661764b8bca91debf625278985ceba7b5ca | [
"MIT"
] | null | null | null | mwaa/mwaa-cdk/app.py | 094459/time-series-and-data-lakes | 75540661764b8bca91debf625278985ceba7b5ca | [
"MIT"
] | 1 | 2021-12-12T16:00:31.000Z | 2021-12-12T16:00:31.000Z | #!/usr/bin/env python3
import io
from aws_cdk import core
from mwaa_cdk.deploy_files import MwaaCdkStackDeployFiles
from mwaa_cdk.mwaa_cdk_backend import MwaaCdkStackBackend
from mwaa_cdk.mwaa_cdk_env import MwaaCdkStackEnv
# Change the mwaa_secrets value to the ARN of the secret you create via the AWS CLI
# The example below is a dummy value
env_EU=core.Environment(region="eu-west-1", account="704533066374")
mwaa_props = {
'dagss3location': 'airflow-timestream-datalake-demo',
'mwaa_env' : 'airflow-timestream-datalake',
'mwaa_secrets' : 'airflow/variables',
'mwaa_ts_iam_arn' : 'arn:aws:iam::704533066374:policy/time-series-and-data-lakes-MWAAPolicyCBFB7F6C-1M1XY2GN81694',
'datalake_bucket' : 'time-series-and-data-lakes-datalake9060eab7-1qga4qb5ly9vn'
}
app = core.App()
mwaa_backend = MwaaCdkStackBackend(
scope=app,
id="timestream-MWAA-vpc",
env=env_EU,
mwaa_props=mwaa_props
)
mwaa_env = MwaaCdkStackEnv(
scope=app,
id="timestream-MWAA-env",
vpc=mwaa_backend.vpc,
env=env_EU,
mwaa_props=mwaa_props
)
mwaa_filedeploy = MwaaCdkStackDeployFiles(
scope=app,
id="timestream-MWAA-deploy",
vpc=mwaa_backend.vpc,
env=env_EU,
mwaa_props=mwaa_props
)
app.synth()
| 26.510638 | 119 | 0.745586 |
793ed0be48a97f45b8471dedba640129bd8df144 | 2,998 | py | Python | blog/views/manage/request_check/request_check.py | multfunc/blog_server | fd00653de77d07c3cbc1472a3e027a2a793aa6a9 | [
"MIT"
] | null | null | null | blog/views/manage/request_check/request_check.py | multfunc/blog_server | fd00653de77d07c3cbc1472a3e027a2a793aa6a9 | [
"MIT"
] | 3 | 2019-07-26T00:53:12.000Z | 2021-12-13T20:33:55.000Z | blog/views/manage/request_check/request_check.py | multfunc/blog_server | fd00653de77d07c3cbc1472a3e027a2a793aa6a9 | [
"MIT"
] | null | null | null | from flask import Blueprint, request, current_app
from flask_mail import Message
from datetime import datetime
# models
from blog.models.base import db
from blog.models.log.log_request_http import LogRequestHttp
from blog.models.user_info import UserInfo
from blog.models.user_status import UserStatus
from blog.models.role import Role
from blog.models.authority import Authority
from blog.models.role import Role
from blog.utils.rsa_utils import rsa_utils
request_check_bp = Blueprint('request_check', __name__, url_prefix='/')
@request_check_bp.before_app_request
def log_save():
ip_original = request.remote_addr
user_agent = str(request.user_agent)
referrer = request.referrer
ip_destination = current_app.config['IP_LOCAL']
host = request.host
uri = request.path
method = request.method
request_data = request.data
port_destination = current_app.config['PORT_LOCAL']
create = datetime.now()
log_request_http = LogRequestHttp(ip_original=ip_original, ip_destination=ip_destination, host=host, uri=uri,
method=method, request_data=request_data, user_agent=user_agent,
port_destination=port_destination, referrer=referrer, create=create)
try:
db.session.add_all([log_request_http])
db.session.commit()
except Exception as e:
print(e)
db.session.rollback()
@request_check_bp.before_app_first_request
def authority_verify():
ip_original = request.remote_addr
# message = Message(subject='hello flask-mail', recipients=['379505061@qq.com'],
    #                   body=f'{ip_original}:flask-mail test code')
# with open(f'{current_app.config["IMAGE_DIR"]}\\static\\img\\2.jpg', mode='rb') as file:
    #     message.attach(filename='test-image.jpg', content_type="image/jpg", data=file.read())
# try:
# # current_app.mail.send(message)
    #     print("Mail sent successfully")
    #     print("No mail was actually sent")
# except Exception as e:
# print(e)
user_info = db.session.query(UserInfo).filter(UserInfo.account == ip_original).one_or_none()
if user_info:
pass
else:
role = db.session.query(Role).filter(Role.name == 'Guest').one_or_none()
password = rsa_utils.encrypt_by_PKCS1_OAEP("123456".encode('utf8'))
tmp_user_info = UserInfo(account=ip_original, name='临时游客', password=password, modified=datetime.now(),
create=datetime.now(), roles=[role])
db.session.add_all([tmp_user_info])
        # Send an email notification that a new visitor has arrived
message = Message(subject='hello flask-mail', recipients=['379505061@qq.com'],
                          body=f'{ip_original}: Hi, I just came to visit')
try:
current_app.mail.send(message)
print("邮件发送成功")
print("并没有发邮件")
except Exception as e:
print(e)
try:
db.session.commit()
except Exception as e:
print(e)
db.session.rollback()
| 37.475 | 113 | 0.659106 |
793ed2afd633d04f9875c822d228589c6333cac3 | 1,356 | py | Python | app/__init__.py | Hassan-abdi-dev/picth-here | 1f7fac98350720ecf4047d49ab5286b0ef8c3df0 | [
"Unlicense"
] | 1 | 2021-12-12T23:33:25.000Z | 2021-12-12T23:33:25.000Z | app/__init__.py | Hassan-abdi-dev/picth-here | 1f7fac98350720ecf4047d49ab5286b0ef8c3df0 | [
"Unlicense"
] | 4 | 2019-11-24T21:10:29.000Z | 2021-04-30T20:57:36.000Z | app/__init__.py | Hassan-abdi-dev/picth-here | 1f7fac98350720ecf4047d49ab5286b0ef8c3df0 | [
"Unlicense"
] | 2 | 2020-01-16T06:35:46.000Z | 2020-01-16T06:48:06.000Z | from flask import Flask
from flask_bootstrap import Bootstrap
from config import config_options
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_simplemde import SimpleMDE
# Instances of flask extensions
# Instance of LoginManger and using its methods
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
bootstrap = Bootstrap()
db = SQLAlchemy()
simple = SimpleMDE()
def create_app(config_name):
'''
Function that takes configuration setting key as an argument
Args:
config_name : name of the configuration to be used
'''
# Initialising application
app = Flask(__name__)
simple.init_app(app)
# Creating the app configurations
app.config.from_object(config_options[config_name])
# Initialising flask extensions
bootstrap.init_app(app)
db.init_app(app)
login_manager.init_app(app)
# Regestering the main blueprint
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
    # Registering the auth blueprint
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
# Setting config when using an API
# from .requests import configure_request
# configure_request(app)
return app | 25.584906 | 64 | 0.75295 |
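# Typical usage from a run script (the 'development' key is assumed to be
# defined in config_options):
#
#     from app import create_app
#     app = create_app('development')
#     app.run()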
793ed2f0d5ae6d79543c1c5d07666ed6e414f582 | 29,865 | py | Python | minikerberos/asn1_structs.py | fuckup1337/minikerberos | 4c7d6a9d791b6a7b05a211a5bccb6c4e6c37187e | [
"MIT"
] | null | null | null | minikerberos/asn1_structs.py | fuckup1337/minikerberos | 4c7d6a9d791b6a7b05a211a5bccb6c4e6c37187e | [
"MIT"
] | null | null | null | minikerberos/asn1_structs.py | fuckup1337/minikerberos | 4c7d6a9d791b6a7b05a211a5bccb6c4e6c37187e | [
"MIT"
] | 1 | 2019-10-06T17:15:43.000Z | 2019-10-06T17:15:43.000Z | #!/usr/bin/env python3
#
# Author:
# Tamas Jos (@skelsec)
#
# https://zeroshell.org/kerberos/kerberos-operation/
from asn1crypto import core
import enum
import os
# KerberosV5Spec2 DEFINITIONS EXPLICIT TAGS ::=
TAG = 'explicit'
# class
UNIVERSAL = 0
APPLICATION = 1
CONTEXT = 2
krb5_pvno = 5 #-- current Kerberos protocol version number
"""
class NegotiationToken(core.Choice):
_alternatives = [
#('NegTokenInit2', NegTokenInit2, {'implicit': (0,16) } ), #NegTokenInit2 the '2' in the name is because Microsoft added modifications to the original rfc :)
('NegTokenInit2', NegTokenInit2, {'implicit': (0,16) } ), #NegTokenInit2 the '2' in the name is because Microsoft added modifications to the original rfc :)
('negTokenResp', negTokenResp, {'explicit': (2,1) } ),
]
"""
class PADATA_TYPE(core.Enumerated):
_map = {
0 : 'NONE', #(0),
1 : 'TGS-REQ', #(1), # 1 : 'AP-REQ', #(1),
2 : 'ENC-TIMESTAMP', #(2),
3 : 'PW-SALT', #(3),
5 : 'ENC-UNIX-TIME', #(5),
6 : 'SANDIA-SECUREID', #(6),
7 : 'SESAME', #(7),
8 : 'OSF-DCE', #(8),
9 : 'CYBERSAFE-SECUREID', #(9),
10 : 'AFS3-SALT', #(10),
11 : 'ETYPE-INFO', #(11),
12 : 'SAM-CHALLENGE', #(12), -- ', #(sam/otp)
13 : 'SAM-RESPONSE', #(13), -- ', #(sam/otp)
14 : 'PK-AS-REQ-19', #(14), -- ', #(PKINIT-19)
15 : 'PK-AS-REP-19', #(15), -- ', #(PKINIT-19)
15 : 'PK-AS-REQ-WIN', #(15), -- ', #(PKINIT - old number)
16 : 'PK-AS-REQ', #(16), -- ', #(PKINIT-25)
17 : 'PK-AS-REP', #(17), -- ', #(PKINIT-25)
18 : 'PA-PK-OCSP-RESPONSE', #(18),
19 : 'ETYPE-INFO2', #(19),
20 : 'USE-SPECIFIED-KVNO', #(20),
20 : 'SVR-REFERRAL-INFO', #(20), --- old ms referral number
21 : 'SAM-REDIRECT', #(21), -- ', #(sam/otp)
22 : 'GET-FROM-TYPED-DATA', #(22),
23 : 'SAM-ETYPE-INFO', #(23),
25 : 'SERVER-REFERRAL', #(25),
24 : 'ALT-PRINC', #(24), -- ', #(crawdad@fnal.gov)
30 : 'SAM-CHALLENGE2', #(30), -- ', #(kenh@pobox.com)
31 : 'SAM-RESPONSE2', #(31), -- ', #(kenh@pobox.com)
41 : 'PA-EXTRA-TGT', #(41), -- Reserved extra TGT
102 : 'TD-KRB-PRINCIPAL', #(102), -- PrincipalName
104 : 'PK-TD-TRUSTED-CERTIFIERS', #(104), -- PKINIT
105 : 'PK-TD-CERTIFICATE-INDEX', #(105), -- PKINIT
106 : 'TD-APP-DEFINED-ERROR', #(106), -- application specific
107 : 'TD-REQ-NONCE', #(107), -- INTEGER
108 : 'TD-REQ-SEQ', #(108), -- INTEGER
128 : 'PA-PAC-REQUEST', #(128), -- jbrezak@exchange.microsoft.com
129 : 'PA-FOR-USER', #(129), -- MS-KILE
130 : 'FOR-X509-USER', #(130), -- MS-KILE
131 : 'FOR-CHECK-DUPS', #(131), -- MS-KILE
132 : 'AS-CHECKSUM', #(132), -- MS-KILE
132 : 'PK-AS-09-BINDING', #(132), -- client send this to -- tell KDC that is supports -- the asCheckSum in the -- PK-AS-REP
133 : 'CLIENT-CANONICALIZED', #(133), -- referals
133 : 'FX-COOKIE', #(133), -- krb-wg-preauth-framework
134 : 'AUTHENTICATION-SET', #(134), -- krb-wg-preauth-framework
135 : 'AUTH-SET-SELECTED', #(135), -- krb-wg-preauth-framework
136 : 'FX-FAST', #(136), -- krb-wg-preauth-framework
137 : 'FX-ERROR', #(137), -- krb-wg-preauth-framework
138 : 'ENCRYPTED-CHALLENGE', #(138), -- krb-wg-preauth-framework
141 : 'OTP-CHALLENGE', #(141), -- ', #(gareth.richards@rsa.com)
142 : 'OTP-REQUEST', #(142), -- ', #(gareth.richards@rsa.com)
143 : 'OTP-CONFIRM', #(143), -- ', #(gareth.richards@rsa.com)
144 : 'OTP-PIN-CHANGE', #(144), -- ', #(gareth.richards@rsa.com)
145 : 'EPAK-AS-REQ', #(145),
146 : 'EPAK-AS-REP', #(146),
147 : 'PKINIT-KX', #(147), -- krb-wg-anon
148 : 'PKU2U-NAME', #(148), -- zhu-pku2u
149 : 'REQ-ENC-PA-REP', #(149), --
151 : 'SPAKE', #(151), https://datatracker.ietf.org/doc/draft-ietf-kitten-krb-spake-preauth/?include_text=1
165 : 'SUPPORTED-ETYPES', #(165) -- MS-KILE
167 : 'PA-PAC-OPTIONS',
}
class AUTHDATA_TYPE(core.Enumerated):
_map = {
1 : 'IF-RELEVANT', #1),
2 : 'INTENDED-FOR_SERVER', #2),
3 : 'INTENDED-FOR-APPLICATION-CLASS', #3),
4 : 'KDC-ISSUED', #4),
5 : 'AND-OR', #5),
6 : 'MANDATORY-TICKET-EXTENSIONS', #6),
7 : 'IN-TICKET-EXTENSIONS', #7),
8 : 'MANDATORY-FOR-KDC', #8),
9 : 'INITIAL-VERIFIED-CAS', #9),
64 : 'OSF-DCE', #64),
65 : 'SESAME', #65),
66 : 'OSF-DCE-PKI-CERTID', #66),
128 : 'WIN2K-PAC', #128),
129 : 'GSS-API-ETYPE-NEGOTIATION', #129), -- Authenticator only
-17 : 'SIGNTICKET-OLDER', #-17),
142 : 'SIGNTICKET-OLD', #142),
512 : 'SIGNTICKET', #512)
}
class CKSUMTYPE(core.Enumerated):
_map = {
0 : 'NONE', #0),
1 : 'CRC32', #1),
2 : 'RSA_MD4', #2),
3 : 'RSA_MD4_DES', #3),
4 : 'DES_MAC', #4),
5 : 'DES_MAC_K', #5),
6 : 'RSA_MD4_DES_K', #6),
7 : 'RSA_MD5', #7),
8 : 'RSA_MD5_DES', #8),
9 : 'RSA_MD5_DES3', #9),
10 : 'SHA1_OTHER', #10),
12 : 'HMAC_SHA1_DES3', #12),
14 : 'SHA1', #14),
15 : 'HMAC_SHA1_96_AES_128', #15),
16 : 'HMAC_SHA1_96_AES_256', #16),
0x8003 : 'GSSAPI', #0x8003),
-138 : 'HMAC_MD5', #-138), -- unofficial microsoft number
-1138 : 'HMAC_MD5_ENC', #-1138) -- even more unofficial
}
#enctypes
class ENCTYPE(core.Enumerated):
_map = {
0 : 'NULL', #0),
1 : 'DES_CBC_CRC', #1),
2 : 'DES_CBC_MD4', #2),
3 : 'DES_CBC_MD5', #3),
5 : 'DES3_CBC_MD5', #5),
7 : 'OLD_DES3_CBC_SHA1', #7),
8 : 'SIGN_DSA_GENERATE', #8),
9 : 'ENCRYPT_RSA_PRIV', #9),
10 : 'ENCRYPT_RSA_PUB', #10),
16 : 'DES3_CBC_SHA1', #16), -- with key derivation
17 : 'AES128_CTS_HMAC_SHA1_96', #17),
18 : 'AES256_CTS_HMAC_SHA1_96', #18),
23 : 'ARCFOUR_HMAC_MD5', #23),
24 : 'ARCFOUR_HMAC_MD5_56', #24),
48 : 'ENCTYPE_PK_CROSS', #48),
#-- some "old" windows types
-128 : 'ARCFOUR_MD4', #-128),
-133 : 'ARCFOUR_HMAC_OLD', #-133),
-135 : 'ARCFOUR_HMAC_OLD_EXP', #-135),
#-- these are for Heimdal internal use
-0x1000 : 'DES_CBC_NONE', #-0x1000),
-0x1001 : 'DES3_CBC_NONE', #-0x1001),
-0x1002 : 'DES_CFB64_NONE', #-0x1002),
-0x1003 : 'DES_PCBC_NONE', #-0x1003),
-0x1004 : 'DIGEST_MD5_NONE', #-0x1004), -- private use, lukeh@padl.com
-0x1005 : 'CRAM_MD5_NONE', #-0x1005) -- private use, lukeh@padl.com
}
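#A quick sanity check of the mapping above (asn1crypto resolves the integer
#through _map; 18 is the AES256 enctype):
#
#	assert ENCTYPE(18).native == 'AES256_CTS_HMAC_SHA1_96'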
class SequenceOfEnctype(core.SequenceOf):
_child_spec = core.Integer
class Microseconds(core.Integer):
""" ::= INTEGER (0..999999)
-- microseconds
"""
class krb5int32 (core.Integer):
"""krb5int32 ::= INTEGER (-2147483648..2147483647)
"""
class krb5uint32 (core.Integer):
"""krb5uint32 ::= INTEGER (0..4294967295)
"""
class KerberosString(core.GeneralString):
"""KerberosString ::= GeneralString (IA5String)
For compatibility, implementations MAY choose to accept GeneralString
values that contain characters other than those permitted by
IA5String...
"""
class SequenceOfKerberosString(core.SequenceOf):
_child_spec = KerberosString
# https://github.com/tiran/kkdcpasn1/blob/asn1crypto/pykkdcpasn1.py
class Realm(KerberosString):
"""Realm ::= KerberosString
"""
# https://github.com/tiran/kkdcpasn1/blob/asn1crypto/pykkdcpasn1.py
class PrincipalName(core.Sequence):
"""PrincipalName for KDC-REQ-BODY and Ticket
PrincipalName ::= SEQUENCE {
name-type [0] Int32,
name-string [1] SEQUENCE OF KerberosString
}
"""
_fields = [
('name-type', krb5int32, {'tag_type': TAG, 'tag': 0}),
('name-string', SequenceOfKerberosString, {'tag_type': TAG, 'tag': 1}),
]
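#Build/parse sketch (the principal is a placeholder; name-type 1 is
#KRB_NT_PRINCIPAL):
#
#	pn = PrincipalName({'name-type': 1, 'name-string': ['administrator']})
#	raw = pn.dump()               #DER-encoded bytes
#	pn2 = PrincipalName.load(raw) #round-trip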
class Principal(core.Sequence):
_fields = [
('name', PrincipalName, {'tag_type': TAG, 'tag': 0}),
('realm', Realm, {'tag_type': TAG, 'tag': 1}),
]
class Principals(core.SequenceOf):
_child_spec = Principal
class HostAddress(core.Sequence):
"""HostAddress for HostAddresses
HostAddress ::= SEQUENCE {
addr-type [0] Int32,
address [1] OCTET STRING
}
"""
_fields = [
('addr-type', krb5int32, {'tag_type': TAG, 'tag': 0}),
('address', core.OctetString, {'tag_type': TAG, 'tag': 1}),
]
class HostAddresses(core.SequenceOf):
"""SEQUENCE OF HostAddress
"""
_child_spec = HostAddress
class KerberosTime(core.GeneralizedTime):
"""KerberosTime ::= GeneralizedTime
"""
class AuthorizationDataElement(core.Sequence):
_fields = [
('ad-type', krb5int32, {'tag_type': TAG, 'tag': 0}),
('ad-data', core.OctetString, {'tag_type': TAG, 'tag': 1}),
]
class AuthorizationData(core.SequenceOf):
"""SEQUENCE OF HostAddress
"""
_child_spec = AuthorizationDataElement
class APOptions(core.BitString):
_map = {
0 : 'reserved', #(0),
1 : 'use-session-key', #(1),
2 : 'mutual-required', #(2)
}
class TicketFlags(core.BitString):
_map = {
0: 'reserved',
1: 'forwardable',
2: 'forwarded',
3: 'proxiable',
4: 'proxy',
5: 'may-postdate',
6: 'postdated',
7: 'invalid',
8: 'renewable',
9: 'initial',
10: 'pre-authent',
11: 'hw-authent',
12: 'transited-policy-checked',
13: 'ok-as-delegate',
14: 'anonymous',
15: 'enc-pa-rep'
}
class KDCOptions(core.BitString):
_map = {
0: 'reserved',
1: 'forwardable',
2: 'forwarded',
3: 'proxiable',
4: 'proxy',
5: 'allow-postdate',
6: 'postdated',
7: 'unused7',
8: 'renewable',
9: 'unused9',
10: 'unused10',
11: 'opt-hardware-auth',
12: 'unused12',
13: 'unused13',
14: 'constrained-delegation', #-- cname-in-addl-tkt (14)
15: 'canonicalize',
16: 'request-anonymous',
17: 'unused17',
18: 'unused18',
19: 'unused19',
20: 'unused20',
21: 'unused21',
22: 'unused22',
23: 'unused23',
24: 'unused24',
25: 'unused25',
26: 'disable-transited-check',
27: 'renewable-ok',
28: 'enc-tkt-in-skey',
30: 'renew',
31: 'validate',
}
class LR_TYPE(core.Enumerated):
_map = {
0 : 'NONE', #0), -- no information
1 : 'INITIAL_TGT', #1), -- last initial TGT request
2 : 'INITIAL', #2), -- last initial request
3 : 'ISSUE_USE_TGT', #3), -- time of newest TGT used
4 : 'RENEWAL', #4), -- time of last renewal
5 : 'REQUEST', #5), -- time of last request ', #of any type)
6 : 'PW_EXPTIME', #6), -- expiration time of password
7 : 'ACCT_EXPTIME', #7) -- expiration time of account
}
class LastReqInner(core.Sequence):
_fields = [
('lr-type', krb5int32, {'tag_type': TAG, 'tag': 0}), #LR_TYPE
('lr-value', KerberosTime, {'tag_type': TAG, 'tag': 1}),
]
class LastReq(core.SequenceOf):
_child_spec = LastReqInner
class EncryptedData(core.Sequence):
_fields = [
('etype', krb5int32, {'tag_type': TAG, 'tag': 0}), #-- EncryptionType
('kvno', krb5uint32, {'tag_type': TAG, 'tag': 1, 'optional': True}), #
('cipher', core.OctetString, {'tag_type': TAG, 'tag': 2}), #ciphertext
]
class EncryptionKey(core.Sequence):
_fields = [
('keytype', krb5uint32, {'tag_type': TAG, 'tag': 0}), #-- EncryptionType
('keyvalue', core.OctetString, {'tag_type': TAG, 'tag': 1}), #
]
#-- encoded Transited field
class TransitedEncoding(core.Sequence):
_fields = [
('tr-type', krb5uint32, {'tag_type': TAG, 'tag': 0}), #-- must be registered
('contents', core.OctetString, {'tag_type': TAG, 'tag': 1}), #
]
# https://github.com/tiran/kkdcpasn1/blob/asn1crypto/pykkdcpasn1.py
class Ticket(core.Sequence):
explicit = (APPLICATION,1)
_fields = [
('tkt-vno', krb5int32, {'tag_type': TAG, 'tag': 0}),
('realm', Realm, {'tag_type': TAG, 'tag': 1}),
('sname', PrincipalName, {'tag_type': TAG, 'tag': 2}),
('enc-part', EncryptedData, {'tag_type': TAG, 'tag': 3}), #EncTicketPart
]
class SequenceOfTicket(core.SequenceOf):
"""SEQUENCE OF Ticket for KDC-REQ-BODY
"""
_child_spec = Ticket
#-- Encrypted part of ticket
class EncTicketPart(core.Sequence):
explicit = (APPLICATION, 3)
_fields = [
('flags', TicketFlags, {'tag_type': TAG, 'tag': 0}),
('key', EncryptionKey, {'tag_type': TAG, 'tag': 1}),
('crealm', Realm, {'tag_type': TAG, 'tag': 2}),
('cname', PrincipalName, {'tag_type': TAG, 'tag': 3}),
('transited', TransitedEncoding, {'tag_type': TAG, 'tag': 4}),
('authtime', KerberosTime, {'tag_type': TAG, 'tag': 5}),
('starttime', KerberosTime, {'tag_type': TAG, 'tag': 6, 'optional': True}),
('endtime', KerberosTime, {'tag_type': TAG, 'tag': 7}),
('renew-till', KerberosTime, {'tag_type': TAG, 'tag': 8, 'optional': True}),
('caddr', HostAddresses, {'tag_type': TAG, 'tag': 9, 'optional': True}),
('authorization-data', AuthorizationData, {'tag_type': TAG, 'tag': 10, 'optional': True}),
]
class Checksum(core.Sequence):
_fields = [
('cksumtype', krb5int32, {'tag_type': TAG, 'tag': 0}), #CKSUMTYPE
('checksum', core.OctetString, {'tag_type': TAG, 'tag': 1}),
]
class Authenticator(core.Sequence):
explicit = (APPLICATION,2)
_fields = [
('authenticator-vno', krb5int32, {'tag_type': TAG, 'tag': 0}),
('crealm', Realm, {'tag_type': TAG, 'tag': 1}),
('cname', PrincipalName, {'tag_type': TAG, 'tag': 2}),
('cksum', Checksum, {'tag_type': TAG, 'tag': 3, 'optional': True}),
('cusec', krb5int32, {'tag_type': TAG, 'tag': 4}),
('ctime', KerberosTime, {'tag_type': TAG, 'tag': 5}),
('subkey', EncryptionKey, {'tag_type': TAG, 'tag': 6, 'optional': True}),
('seq-number', krb5uint32, {'tag_type': TAG, 'tag': 7, 'optional': True}),
('authorization-data', AuthorizationData, {'tag_type': TAG, 'tag': 8, 'optional': True}),
]
class PA_DATA(core.Sequence): #NOTE: unlike most sequences here, the context tags start at 1, not 0
_fields = [
('padata-type', core.Integer, {'tag_type': TAG, 'tag': 1}),
('padata-value', core.OctetString, {'tag_type': TAG, 'tag': 2}),
]
class ETYPE_INFO_ENTRY(core.Sequence):
_fields = [
('etype', krb5int32, {'tag_type': TAG, 'tag': 0}),
('salt', core.OctetString, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('salttype', krb5int32, {'tag_type': TAG, 'tag': 2, 'optional': True}),
]
class ETYPE_INFO(core.SequenceOf):
_child_spec = ETYPE_INFO_ENTRY
class ETYPE_INFO2_ENTRY(core.Sequence):
_fields = [
('etype', krb5int32, {'tag_type': TAG, 'tag': 0}),
('salt', KerberosString, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('s2kparams', core.OctetString, {'tag_type': TAG, 'tag': 2, 'optional': True}),
]
class ETYPE_INFO2(core.SequenceOf):
_child_spec = ETYPE_INFO2_ENTRY
class METHOD_DATA(core.SequenceOf):
_child_spec = PA_DATA
class TypedData(core.Sequence):
_fields = [
('data-type', krb5int32, {'tag_type': TAG, 'tag': 0}),
('data-value', core.OctetString, {'tag_type': TAG, 'tag': 1, 'optional': True}),
]
"""
class TYPED-DATA ::= SEQUENCE SIZE (1..MAX) OF TypedData
"""
class KDC_REQ_BODY(core.Sequence):
_fields = [
('kdc-options', KDCOptions, {'tag_type': TAG, 'tag': 0}),
('cname', PrincipalName, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('realm', Realm, {'tag_type': TAG, 'tag': 2}),
('sname', PrincipalName , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('from', KerberosTime , {'tag_type': TAG, 'tag': 4, 'optional': True}),
('till', KerberosTime , {'tag_type': TAG, 'tag': 5, 'optional': True}),
('rtime', KerberosTime , {'tag_type': TAG, 'tag': 6, 'optional': True}),
('nonce', krb5int32 , {'tag_type': TAG, 'tag': 7}),
('etype', SequenceOfEnctype , {'tag_type': TAG, 'tag': 8}), # -- EncryptionType,preference order
('addresses', HostAddresses , {'tag_type': TAG, 'tag': 9, 'optional': True}),
('enc-authorization-data', EncryptedData , {'tag_type': TAG, 'tag': 10, 'optional': True}), #-- Encrypted AuthorizationData encoding
('additional-tickets', SequenceOfTicket , {'tag_type': TAG, 'tag': 11, 'optional': True}),
]
class KDC_REQ(core.Sequence):
_fields = [
('pvno', krb5int32, {'tag_type': TAG, 'tag': 1}),
('msg-type', krb5int32, {'tag_type': TAG, 'tag': 2}), #MESSAGE_TYPE
('padata', METHOD_DATA , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('req-body', KDC_REQ_BODY , {'tag_type': TAG, 'tag': 4}),
]
class AS_REQ(KDC_REQ):
explicit = (APPLICATION, 10)
class TGS_REQ(KDC_REQ):
explicit = (APPLICATION, 12)
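#A rough sketch of filling in a request body (realm and names are
#placeholders; assumes `import datetime`; padata handling and proper nonce
#generation are omitted):
#
#	now = datetime.datetime.now(datetime.timezone.utc)
#	body = KDC_REQ_BODY({
#		'kdc-options': KDCOptions(set(['forwardable', 'renewable', 'proxiable'])),
#		'cname': PrincipalName({'name-type': 1, 'name-string': ['victim']}),
#		'realm': 'TEST.CORP',
#		'sname': PrincipalName({'name-type': 2, 'name-string': ['krbtgt', 'TEST.CORP']}),
#		'till': now + datetime.timedelta(days=1),
#		'nonce': 12345678,
#		'etype': [23, 17, 18]})
#	req = AS_REQ({'pvno': krb5_pvno, 'msg-type': 10, 'req-body': body})
#	raw = req.dump()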
#-- padata-type ::= PA-ENC-TIMESTAMP
#-- padata-value ::= EncryptedData - PA-ENC-TS-ENC
class PA_PAC_OPTIONSTypes(core.BitString):
_map = {
0: 'Claims',
1: 'Branch Aware',
2: 'Forward to Full DC',
3: 'resource-based constrained delegation',
}
class PA_PAC_OPTIONS(core.Sequence):
_fields = [
('value', PA_PAC_OPTIONSTypes, {'tag_type': TAG, 'tag': 0}),
]
class PA_ENC_TS_ENC(core.Sequence):
_fields = [
('patimestamp', KerberosTime, {'tag_type': TAG, 'tag': 0}), #-- client's time
('pausec', krb5int32, {'tag_type': TAG, 'tag': 1, 'optional':True}),
]
#-- draft-brezak-win2k-krb-authz-01
class PA_PAC_REQUEST(core.Sequence):
_fields = [
('include-pac', core.Boolean, {'tag_type': TAG, 'tag': 0}), #-- Indicates whether a PAC should be included or not
]
#-- PacketCable provisioning server location, PKT-SP-SEC-I09-030728.pdf
class PROV_SRV_LOCATION(core.GeneralString):
pass
class KDC_REP(core.Sequence):
_fields = [
('pvno', core.Integer, {'tag_type': TAG, 'tag': 0}),
('msg-type', krb5int32, {'tag_type': TAG, 'tag': 1}), #MESSAGE_TYPE
('padata', METHOD_DATA, {'tag_type': TAG, 'tag': 2, 'optional': True}),
('crealm', Realm , {'tag_type': TAG, 'tag': 3}),
('cname', PrincipalName , {'tag_type': TAG, 'tag': 4}),
('ticket', Ticket , {'tag_type': TAG, 'tag': 5}),
('enc-part', EncryptedData , {'tag_type': TAG, 'tag': 6}), #EncKDCRepPart
]
class AS_REP(KDC_REP):
#::= [APPLICATION 11] KDC-REP
explicit = (APPLICATION, 11)
class TGS_REP(KDC_REP): # ::= [APPLICATION 13] KDC-REP
explicit = (APPLICATION, 13)
class EncKDCRepPart(core.Sequence):
_fields = [
('key', EncryptionKey, {'tag_type': TAG, 'tag': 0}),
('last-req', LastReq, {'tag_type': TAG, 'tag': 1}),
('nonce', krb5int32, {'tag_type': TAG, 'tag': 2}),
('key-expiration', KerberosTime , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('flags', TicketFlags , {'tag_type': TAG, 'tag': 4}),
('authtime', KerberosTime , {'tag_type': TAG, 'tag': 5}),
('starttime', KerberosTime , {'tag_type': TAG, 'tag': 6, 'optional': True}),
('endtime', KerberosTime , {'tag_type': TAG, 'tag': 7}),
('renew-till', KerberosTime , {'tag_type': TAG, 'tag': 8, 'optional': True}),
('srealm', Realm , {'tag_type': TAG, 'tag': 9}),
('sname', PrincipalName , {'tag_type': TAG, 'tag': 10}),
('caddr', HostAddresses , {'tag_type': TAG, 'tag': 11, 'optional': True}),
('encrypted-pa-data', METHOD_DATA , {'tag_type': TAG, 'tag': 12, 'optional': True}),
]
class EncASRepPart(EncKDCRepPart):
explicit = (APPLICATION, 25)
class EncTGSRepPart(EncKDCRepPart):
explicit = (APPLICATION, 26)
class AP_REQ(core.Sequence):
explicit = (APPLICATION, 14)
_fields = [
('pvno', krb5int32, {'tag_type': TAG, 'tag': 0}),
('msg-type', krb5int32, {'tag_type': TAG, 'tag': 1}), #MESSAGE_TYPE
('ap-options', APOptions, {'tag_type': TAG, 'tag': 2}),
('ticket', Ticket , {'tag_type': TAG, 'tag': 3}),
('authenticator', EncryptedData , {'tag_type': TAG, 'tag': 4}),
]
class AP_REP(core.Sequence):
explicit = (APPLICATION, 15)
_fields = [
('pvno', krb5int32, {'tag_type': TAG, 'tag': 0}),
('msg-type', krb5int32, {'tag_type': TAG, 'tag': 1}),#MESSAGE_TYPE
('enc-part', EncryptedData , {'tag_type': TAG, 'tag': 2}),
]
class EncAPRepPart(core.Sequence):
explicit = (APPLICATION, 27)
_fields = [
('ctime', KerberosTime, {'tag_type': TAG, 'tag': 0}),
('cusec', krb5int32, {'tag_type': TAG, 'tag': 1}),
('subkey', EncryptionKey , {'tag_type': TAG, 'tag': 2, 'optional': True}),
('seq-number', krb5uint32 , {'tag_type': TAG, 'tag': 3, 'optional': True}),
]
class KRB_SAFE_BODY(core.Sequence):
_fields = [
('user-data', core.OctetString, {'tag_type': TAG, 'tag': 0}),
('timestamp', KerberosTime, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('usec', krb5int32 , {'tag_type': TAG, 'tag': 2, 'optional': True}),
('seq-number', krb5uint32 , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('s-address', HostAddress , {'tag_type': TAG, 'tag': 4, 'optional': True}),
('r-address', HostAddress , {'tag_type': TAG, 'tag': 5, 'optional': True}),
]
class KRB_SAFE(core.Sequence):
explicit = (APPLICATION, 20)
_fields = [
('pvno', krb5int32, {'tag_type': TAG, 'tag': 0}),
('msg-type', krb5int32, {'tag_type': TAG, 'tag': 1}),#MESSAGE_TYPE
('safe-body', KRB_SAFE_BODY , {'tag_type': TAG, 'tag': 2}),
('cksum', Checksum , {'tag_type': TAG, 'tag': 3}),
]
class KRB_PRIV(core.Sequence):
explicit = (APPLICATION, 21)
_fields = [
('pvno', krb5int32, {'tag_type': TAG, 'tag': 0}),
('msg-type', krb5int32, {'tag_type': TAG, 'tag': 1}),#MESSAGE_TYPE
('enc-part', EncryptedData, {'tag_type': TAG, 'tag': 2}),
]
class EncKrbPrivPart(core.Sequence):
explicit = (APPLICATION, 28)
_fields = [
('user-data', core.OctetString, {'tag_type': TAG, 'tag': 0}),
('timestamp', KerberosTime, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('usec', krb5int32 , {'tag_type': TAG, 'tag': 2, 'optional': True}),
('seq-number', krb5uint32 , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('s-address', HostAddress , {'tag_type': TAG, 'tag': 4, 'optional': True}),
('r-address', HostAddress , {'tag_type': TAG, 'tag': 5, 'optional': True}),
]
class KRB_CRED(core.Sequence):
explicit = (APPLICATION, 22)
_fields = [
('pvno', core.Integer, {'tag_type': TAG, 'tag': 0}),
('msg-type', core.Integer, {'tag_type': TAG, 'tag': 1}),
('tickets', SequenceOfTicket, {'tag_type': TAG, 'tag': 2}),
('enc-part', EncryptedData , {'tag_type': TAG, 'tag': 3}),
]
# http://web.mit.edu/freebsd/head/crypto/heimdal/lib/asn1/krb5.asn1
class KrbCredInfo(core.Sequence):
_fields = [
('key', EncryptionKey, {'tag_type': TAG, 'tag': 0}),
('prealm', Realm, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('pname', PrincipalName, {'tag_type': TAG, 'tag': 2, 'optional': True}),
('flags', TicketFlags , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('authtime', KerberosTime , {'tag_type': TAG, 'tag': 4, 'optional': True}),
('starttime', KerberosTime , {'tag_type': TAG, 'tag': 5, 'optional': True}),
('endtime', KerberosTime , {'tag_type': TAG, 'tag': 6, 'optional': True}),
('renew-till', KerberosTime , {'tag_type': TAG, 'tag': 7, 'optional': True}),
('srealm', Realm , {'tag_type': TAG, 'tag': 8, 'optional': True}),
('sname', PrincipalName , {'tag_type': TAG, 'tag': 9, 'optional': True}),
('caddr', HostAddresses , {'tag_type': TAG, 'tag': 10, 'optional': True}),
]
class SequenceOfKrbCredInfo(core.SequenceOf):
_child_spec = KrbCredInfo
class EncKrbCredPart(core.Sequence):
explicit = (APPLICATION, 29)
_fields = [
('ticket-info', SequenceOfKrbCredInfo, {'tag_type': TAG, 'tag': 0}),
('nonce', krb5int32, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('timestamp', KerberosTime , {'tag_type': TAG, 'tag': 2, 'optional': True}),
('usec', krb5int32 , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('s-address', HostAddress , {'tag_type': TAG, 'tag': 4, 'optional': True}),
('r-address', HostAddress , {'tag_type': TAG, 'tag': 5, 'optional': True}),
]
class KRB_ERROR(core.Sequence):
explicit = (APPLICATION, 30)
_fields = [
('pvno', krb5int32, {'tag_type': TAG, 'tag': 0}),
('msg-type',krb5int32 , {'tag_type': TAG, 'tag': 1}), #MESSAGE_TYPE
('ctime', KerberosTime , {'tag_type': TAG, 'tag': 2, 'optional': True}),
('cusec', krb5int32 , {'tag_type': TAG, 'tag': 3, 'optional': True}),
('stime', KerberosTime , {'tag_type': TAG, 'tag': 4}),
('susec', krb5int32 , {'tag_type': TAG, 'tag': 5}),
('error-code', krb5int32 , {'tag_type': TAG, 'tag': 6}),
('crealm', Realm , {'tag_type': TAG, 'tag': 7, 'optional': True}),
('cname', PrincipalName , {'tag_type': TAG, 'tag': 8, 'optional': True}),
('realm', Realm , {'tag_type': TAG, 'tag': 9}),
('sname', PrincipalName , {'tag_type': TAG, 'tag': 10}),
('e-text', core.GeneralString , {'tag_type': TAG, 'tag': 11, 'optional': True}),
('e-data', core.OctetString , {'tag_type': TAG, 'tag': 12, 'optional': True}),
]
class ChangePasswdDataMS(core.Sequence):
_fields = [
('newpasswd', core.OctetString, {'tag_type': TAG, 'tag': 0}),
('targname', PrincipalName, {'tag_type': TAG, 'tag': 1, 'optional': True}),
('targrealm', Realm , {'tag_type': TAG, 'tag': 2, 'optional': True}),
]
class EtypeList(core.SequenceOf):
#-- the client's proposed enctype list in
#-- decreasing preference order, favorite choice first
_child_spec = ENCTYPE
class KerberosResponse(core.Choice):
_alternatives = [
('AS_REP', AS_REP, {'implicit': (APPLICATION,11) } ),
('TGS_REP', TGS_REP, {'implicit': (APPLICATION,13) } ),
('KRB_ERROR', KRB_ERROR, {'implicit': (APPLICATION,30) } ),
]
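# Illustrative helper (added for clarity): the Choice above lets a caller
# parse a KDC reply without knowing its type up front. `reply_bytes` is a
# hypothetical name for the raw DER bytes received from the KDC.
def _example_classify_kdc_reply(reply_bytes):
    rep = KerberosResponse.load(reply_bytes)
    return rep.name, rep.chosen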
# Note: structurally identical to KRB_CRED defined above.
class KRBCRED(core.Sequence):
explicit = (APPLICATION, 22)
_fields = [
('pvno', core.Integer, {'tag_type': TAG, 'tag': 0}),
('msg-type', core.Integer, {'tag_type': TAG, 'tag': 1}),
('tickets', SequenceOfTicket, {'tag_type': TAG, 'tag': 2}),
('enc-part', EncryptedData , {'tag_type': TAG, 'tag': 3}),
]
#https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-sfu/aceb70de-40f0-4409-87fa-df00ca145f5a
#other name: PA-S4U2Self
class PA_FOR_USER_ENC(core.Sequence):
_fields = [
('userName', PrincipalName, {'tag_type': TAG, 'tag': 0}),
('userRealm', Realm, {'tag_type': TAG, 'tag': 1}),
('cksum', Checksum, {'tag_type': TAG, 'tag': 2}),
('auth-package', KerberosString , {'tag_type': TAG, 'tag': 3}),
]
#mask values as used by MS-SFU; renamed from S4UUserID to S4UUserIDOptions
#to avoid shadowing the Sequence of the same name defined below
class S4UUserIDOptions(core.BitString):
    _map = {
        0x40000000 : 'check-logon-hour', #This option causes the KDC to check logon hour restrictions for the user.
        0x20000000 : 'signed-with-kun-27', #In a request, asks the KDC to sign the reply with key usage number 27. In a reply, indicates that it was signed with key usage number 27.
    }
#https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-sfu/cd9d5ca7-ce20-4693-872b-2f5dd41cbff6
class S4UUserID(core.Sequence):
_fields = [
('nonce', core.Integer, {'tag_type': TAG, 'tag': 0}), #-- the nonce in KDC-REQ-BODY
('cname', PrincipalName, {'tag_type': TAG, 'tag': 1, 'optional' : True}),
#-- Certificate mapping hints
('crealm', Realm, {'tag_type': TAG, 'tag': 2}),
('subject-certificate', core.OctetString, {'tag_type': TAG, 'tag': 3, 'optional' : True}),
        ('options', S4UUserIDOptions, {'tag_type': TAG, 'tag': 4, 'optional' : True}),
]
#https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-sfu/cd9d5ca7-ce20-4693-872b-2f5dd41cbff6
class PA_S4U_X509_USER(core.Sequence):
_fields = [
('user-id', S4UUserID, {'tag_type': TAG, 'tag': 0}),
('checksum', Checksum, {'tag_type': TAG, 'tag': 1}),
]
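# Illustrative helper (added for clarity): unpack a PA-S4U-X509-USER padata
# value from an S4U2self TGS exchange. `padata_value` is a hypothetical
# bytes object taken from a reply's padata list.
def _example_parse_pa_s4u(padata_value):
    pa = PA_S4U_X509_USER.load(padata_value)
    return pa['user-id']['nonce'].native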
class AD_IF_RELEVANT(AuthorizationData):
pass
#
#DOMAIN-X500-COMPRESS krb5int32 ::= 1
#
#-- authorization data primitives
#
#AD-IF-RELEVANT ::= AuthorizationData
#
#AD-KDCIssued ::= SEQUENCE {
# ad-checksum[0] Checksum,
# i-realm[1] Realm OPTIONAL,
# i-sname[2] PrincipalName OPTIONAL,
# elements[3] AuthorizationData
#}
#
#AD-AND-OR ::= SEQUENCE {
# condition-count[0] INTEGER,
# elements[1] AuthorizationData
#}
#
#AD-MANDATORY-FOR-KDC ::= AuthorizationData
#
#-- PA-SAM-RESPONSE-2/PA-SAM-RESPONSE-2
#
#PA-SAM-TYPE ::= INTEGER {
# PA_SAM_TYPE_ENIGMA(1), -- Enigma Logic
# PA_SAM_TYPE_DIGI_PATH(2), -- Digital Pathways
# PA_SAM_TYPE_SKEY_K0(3), -- S/key where KDC has key 0
# PA_SAM_TYPE_SKEY(4), -- Traditional S/Key
# PA_SAM_TYPE_SECURID(5), -- Security Dynamics
# PA_SAM_TYPE_CRYPTOCARD(6) -- CRYPTOCard
#}
#
#PA-SAM-REDIRECT ::= HostAddresses
#
#SAMFlags ::= BIT STRING {
# use-sad-as-key(0),
# send-encrypted-sad(1),
# must-pk-encrypt-sad(2)
#}
#
#PA-SAM-CHALLENGE-2-BODY ::= SEQUENCE {
# sam-type[0] krb5int32,
# sam-flags[1] SAMFlags,
# sam-type-name[2] GeneralString OPTIONAL,
# sam-track-id[3] GeneralString OPTIONAL,
# sam-challenge-label[4] GeneralString OPTIONAL,
# sam-challenge[5] GeneralString OPTIONAL,
# sam-response-prompt[6] GeneralString OPTIONAL,
# sam-pk-for-sad[7] EncryptionKey OPTIONAL,
# sam-nonce[8] krb5int32,
# sam-etype[9] krb5int32,
# ...
#}
#
#PA-SAM-CHALLENGE-2 ::= SEQUENCE {
# sam-body[0] PA-SAM-CHALLENGE-2-BODY,
# sam-cksum[1] SEQUENCE OF Checksum, -- (1..MAX)
# ...
#}
#
#PA-SAM-RESPONSE-2 ::= SEQUENCE {
# sam-type[0] krb5int32,
# sam-flags[1] SAMFlags,
# sam-track-id[2] GeneralString OPTIONAL,
# sam-enc-nonce-or-sad[3] EncryptedData, -- PA-ENC-SAM-RESPONSE-ENC
# sam-nonce[4] krb5int32,
# ...
#}
#
#PA-ENC-SAM-RESPONSE-ENC ::= SEQUENCE {
# sam-nonce[0] krb5int32,
# sam-sad[1] GeneralString OPTIONAL,
# ...
#}
#
#PA-S4U2Self ::= SEQUENCE {
# name[0] PrincipalName,
# realm[1] Realm,
# cksum[2] Checksum,
# auth[3] GeneralString
#}
#
## https://github.com/tiran/kkdcpasn1/blob/asn1crypto/pykkdcpasn1.py
#class EncryptedData(core.Sequence):
# """EncryptedData
# * KDC-REQ-BODY
# * Ticket
# * AP-REQ
# * KRB-PRIV
# EncryptedData ::= SEQUENCE {
# etype [0] Int32,
# kvno [1] UInt32 OPTIONAL,
# cipher [2] OCTET STRING
# }
# """
# _fields = [
# ('etype', Int32, {'tag_type': TAG, 'tag': 0}),
# ('kvno', UInt32, {'tag_type': TAG, 'tag': 1, 'optional': True}),
# ('cipher', core.OctetString, {'tag_type': TAG, 'tag': 2}),
#]
#
#class EncryptionKey(core.Sequence):
# """
# EncryptionKey ::= SEQUENCE {
# keytype[0] krb5int32,
# keyvalue[1] OCTET STRING
# }
# """
# _fields = [
# ('keytype', Int32, {'tag_type': TAG, 'tag': 0}),
# ('keyvalue', core.OctetString, {'tag_type': TAG, 'tag': 1}),
#]
#
#class SequenceOfInt32(core.SequenceOf):
# """SEQUENCE OF Int32 for KDC-REQ-BODY
# """
# _child_spec = Int32
#
#
#
#class SequenceOfKrbCredInfo(core.SequenceOf):
# _child_spec = KrbCredInfo
#
#
#class EncKrbCredPart(core.Sequence):
# explicit = (1, 29)
#
# _fields = [
# ('ticket-info', SequenceOfKrbCredInfo, {'tag_type': TAG, 'tag': 0}),
# ('nonce', Int32, {'tag_type': TAG, 'tag': 1, 'optional': True}),
# ('timestamp', KerberosTime , {'tag_type': TAG, 'tag': 2, 'optional': True}),
# ('usec', Microseconds , {'tag_type': TAG, 'tag': 3, 'optional': True}),
# ('s-address', HostAddress , {'tag_type': TAG, 'tag': 4, 'optional': True}),
# ('r-address', HostAddress , {'tag_type': TAG, 'tag': 5, 'optional': True}),
# ]
#
| 31.636653 | 175 | 0.618215 |
793ed341585b2b773162a6f3e1fd7fad089d5333 | 1,388 | py | Python | preview.py | GY-CAS/learn | 491eed7bd00b1bc010492dfdee273a1318b441e3 | [
"MIT"
] | 113 | 2020-09-24T09:31:41.000Z | 2021-07-05T02:16:50.000Z | preview.py | GY-CAS/learn | 491eed7bd00b1bc010492dfdee273a1318b441e3 | [
"MIT"
] | 15 | 2020-12-16T09:31:32.000Z | 2021-06-22T05:06:06.000Z | preview.py | GY-CAS/learn | 491eed7bd00b1bc010492dfdee273a1318b441e3 | [
"MIT"
] | 19 | 2020-12-16T07:15:16.000Z | 2021-07-03T18:24:21.000Z |
# encoding: utf-8
import torch
import ipdb
import cv2
import numpy as np
from options import opt, config
# from dataloader import paired_dataset
from mscv.summary import create_summary_writer, write_image
from mscv.image import tensor2im
from dataloader.dataloaders import train_dataloader, val_dataloader
from dataloader import voc
from utils.vis import visualize_boxes
import misc_utils as utils
import random
import albumentations as A
from albumentations.pytorch.transforms import ToTensorV2
"""
source domain 是clear的
"""
writer = create_summary_writer('logs/preview')
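# The image previews written below can be inspected with TensorBoard, e.g.:
#   tensorboard --logdir logs/preview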
class_names = config.DATA.CLASS_NAMES
preview = train_dataloader # train_dataloader, val_dataloader
for i, sample in enumerate(preview):
# if i > 30:
# break
utils.progress_bar(i, len(preview), 'Handling...')
if opt.debug:
ipdb.set_trace()
image = sample['image'][0].detach().cpu().numpy().transpose([1,2,0])
image = (image.copy()*255).astype(np.uint8)
bboxes = sample['bboxes'][0].cpu().numpy()
labels = sample['labels'][0].cpu().numpy().astype(np.int32)
visualize_boxes(image=image, boxes=bboxes, labels=labels, probs=np.array(np.random.randint(100, 101, size=[len(bboxes)])/100), class_labels=class_names)
write_image(writer, f'preview_{config.DATA.DATASET}/{i}', 'image', image, 0, 'HWC')
writer.flush()
| 27.215686 | 156 | 0.737752 |
793ed35ada7699c4a6bd35235e44a3fecad0d658 | 410 | py | Python | news/migrations/0029_article_views.py | hackerspace-ntnu/website | 0c296cb39759778aaf0c296027345a658414b397 | [
"MIT"
] | 25 | 2016-04-13T20:25:37.000Z | 2021-11-26T14:41:00.000Z | news/migrations/0029_article_views.py | hackerspace-ntnu/website | 0c296cb39759778aaf0c296027345a658414b397 | [
"MIT"
] | 358 | 2016-02-20T21:13:27.000Z | 2022-03-31T20:06:03.000Z | news/migrations/0029_article_views.py | hackerspace-ntnu/website | 0c296cb39759778aaf0c296027345a658414b397 | [
"MIT"
] | 7 | 2016-04-18T14:03:15.000Z | 2022-02-04T14:19:47.000Z |
# Generated by Django 3.2.7 on 2021-09-23 11:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('news', '0028_auto_20210210_2032'),
]
operations = [
migrations.AddField(
model_name='article',
name='views',
field=models.IntegerField(default=0, verbose_name='Sidevisninger'),
),
]
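# Illustrative usage (standard Django workflow, not part of this file):
#   python manage.py migrate news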
| 21.578947 | 79 | 0.609756 |
793ed3afaed0ff5f8aa186091f678640bfaeb21a | 65,592 | py | Python | Lib/idlelib/editor.py | LeslieGerman/cpython | 1f21eaa15e8a0d2b0f78d0e3f2b9e5b458eb0a70 | [
"CNRI-Python-GPL-Compatible"
] | 2 | 2019-09-03T09:56:49.000Z | 2019-09-18T03:26:17.000Z | Lib/idlelib/editor.py | Timesile/cpython | 5dbe0f59b7a4f39c7c606b48056bc29e406ebf78 | [
"CNRI-Python-GPL-Compatible"
] | 1 | 2019-09-10T07:12:05.000Z | 2019-09-10T07:12:05.000Z | Lib/idlelib/editor.py | Timesile/cpython | 5dbe0f59b7a4f39c7c606b48056bc29e406ebf78 | [
"CNRI-Python-GPL-Compatible"
] | null | null | null |
import importlib.abc
import importlib.util
import os
import platform
import re
import string
import sys
import tokenize
import traceback
import webbrowser
from tkinter import *
from tkinter.font import Font
from tkinter.ttk import Scrollbar
import tkinter.simpledialog as tkSimpleDialog
import tkinter.messagebox as tkMessageBox
from idlelib.config import idleConf
from idlelib import configdialog
from idlelib import grep
from idlelib import help
from idlelib import help_about
from idlelib import macosx
from idlelib.multicall import MultiCallCreator
from idlelib import pyparse
from idlelib import query
from idlelib import replace
from idlelib import search
from idlelib import window
# The default tab setting for a Text widget, in average-width characters.
TK_TABWIDTH_DEFAULT = 8
_py_version = ' (%s)' % platform.python_version()
darwin = sys.platform == 'darwin'
def _sphinx_version():
"Format sys.version_info to produce the Sphinx version string used to install the chm docs"
major, minor, micro, level, serial = sys.version_info
release = '%s%s' % (major, minor)
release += '%s' % (micro,)
if level == 'candidate':
release += 'rc%s' % (serial,)
elif level != 'final':
release += '%s%s' % (level[0], serial)
return release
class EditorWindow(object):
from idlelib.percolator import Percolator
from idlelib.colorizer import ColorDelegator, color_config
from idlelib.undo import UndoDelegator
from idlelib.iomenu import IOBinding, encoding
from idlelib import mainmenu
from idlelib.statusbar import MultiStatusBar
from idlelib.autocomplete import AutoComplete
from idlelib.autoexpand import AutoExpand
from idlelib.calltip import Calltip
from idlelib.codecontext import CodeContext
from idlelib.sidebar import LineNumbers
from idlelib.format import FormatParagraph, FormatRegion, Indents, Rstrip
from idlelib.parenmatch import ParenMatch
from idlelib.squeezer import Squeezer
from idlelib.zoomheight import ZoomHeight
filesystemencoding = sys.getfilesystemencoding() # for file names
help_url = None
allow_code_context = True
allow_line_numbers = True
def __init__(self, flist=None, filename=None, key=None, root=None):
# Delay import: runscript imports pyshell imports EditorWindow.
from idlelib.runscript import ScriptBinding
if EditorWindow.help_url is None:
dochome = os.path.join(sys.base_prefix, 'Doc', 'index.html')
if sys.platform.count('linux'):
# look for html docs in a couple of standard places
pyver = 'python-docs-' + '%s.%s.%s' % sys.version_info[:3]
if os.path.isdir('/var/www/html/python/'): # "python2" rpm
dochome = '/var/www/html/python/index.html'
else:
basepath = '/usr/share/doc/' # standard location
dochome = os.path.join(basepath, pyver,
'Doc', 'index.html')
elif sys.platform[:3] == 'win':
chmfile = os.path.join(sys.base_prefix, 'Doc',
'Python%s.chm' % _sphinx_version())
if os.path.isfile(chmfile):
dochome = chmfile
elif sys.platform == 'darwin':
# documentation may be stored inside a python framework
dochome = os.path.join(sys.base_prefix,
'Resources/English.lproj/Documentation/index.html')
dochome = os.path.normpath(dochome)
if os.path.isfile(dochome):
EditorWindow.help_url = dochome
if sys.platform == 'darwin':
# Safari requires real file:-URLs
EditorWindow.help_url = 'file://' + EditorWindow.help_url
else:
EditorWindow.help_url = ("https://docs.python.org/%d.%d/"
% sys.version_info[:2])
self.flist = flist
root = root or flist.root
self.root = root
self.menubar = Menu(root)
self.top = top = window.ListedToplevel(root, menu=self.menubar)
if flist:
self.tkinter_vars = flist.vars
#self.top.instance_dict makes flist.inversedict available to
#configdialog.py so it can access all EditorWindow instances
self.top.instance_dict = flist.inversedict
else:
self.tkinter_vars = {} # keys: Tkinter event names
# values: Tkinter variable instances
self.top.instance_dict = {}
self.recent_files_path = os.path.join(
idleConf.userdir, 'recent-files.lst')
self.prompt_last_line = '' # Override in PyShell
self.text_frame = text_frame = Frame(top)
self.vbar = vbar = Scrollbar(text_frame, name='vbar')
width = idleConf.GetOption('main', 'EditorWindow', 'width', type='int')
text_options = {
'name': 'text',
'padx': 5,
'wrap': 'none',
'highlightthickness': 0,
'width': width,
'tabstyle': 'wordprocessor', # new in 8.5
'height': idleConf.GetOption(
'main', 'EditorWindow', 'height', type='int'),
}
self.text = text = MultiCallCreator(Text)(text_frame, **text_options)
self.top.focused_widget = self.text
self.createmenubar()
self.apply_bindings()
self.top.protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<<close-window>>", self.close_event)
if macosx.isAquaTk():
# Command-W on editor windows doesn't work without this.
text.bind('<<close-window>>', self.close_event)
# Some OS X systems have only one mouse button, so use
# control-click for popup context menus there. For two
# buttons, AquaTk defines <2> as the right button, not <3>.
text.bind("<Control-Button-1>",self.right_menu_event)
text.bind("<2>", self.right_menu_event)
else:
# Elsewhere, use right-click for popup menus.
text.bind("<3>",self.right_menu_event)
text.bind('<MouseWheel>', self.mousescroll)
text.bind('<Button-4>', self.mousescroll)
text.bind('<Button-5>', self.mousescroll)
text.bind('<Configure>', self.handle_winconfig)
text.bind("<<cut>>", self.cut)
text.bind("<<copy>>", self.copy)
text.bind("<<paste>>", self.paste)
text.bind("<<center-insert>>", self.center_insert_event)
text.bind("<<help>>", self.help_dialog)
text.bind("<<python-docs>>", self.python_docs)
text.bind("<<about-idle>>", self.about_dialog)
text.bind("<<open-config-dialog>>", self.config_dialog)
text.bind("<<open-module>>", self.open_module_event)
text.bind("<<do-nothing>>", lambda event: "break")
text.bind("<<select-all>>", self.select_all)
text.bind("<<remove-selection>>", self.remove_selection)
text.bind("<<find>>", self.find_event)
text.bind("<<find-again>>", self.find_again_event)
text.bind("<<find-in-files>>", self.find_in_files_event)
text.bind("<<find-selection>>", self.find_selection_event)
text.bind("<<replace>>", self.replace_event)
text.bind("<<goto-line>>", self.goto_line_event)
text.bind("<<smart-backspace>>",self.smart_backspace_event)
text.bind("<<newline-and-indent>>",self.newline_and_indent_event)
text.bind("<<smart-indent>>",self.smart_indent_event)
self.fregion = fregion = self.FormatRegion(self)
# self.fregion used in smart_indent_event to access indent_region.
text.bind("<<indent-region>>", fregion.indent_region_event)
text.bind("<<dedent-region>>", fregion.dedent_region_event)
text.bind("<<comment-region>>", fregion.comment_region_event)
text.bind("<<uncomment-region>>", fregion.uncomment_region_event)
text.bind("<<tabify-region>>", fregion.tabify_region_event)
text.bind("<<untabify-region>>", fregion.untabify_region_event)
text.bind("<<toggle-tabs>>", self.Indents.toggle_tabs_event)
text.bind("<<change-indentwidth>>", self.Indents.change_indentwidth_event)
text.bind("<Left>", self.move_at_edge_if_selection(0))
text.bind("<Right>", self.move_at_edge_if_selection(1))
text.bind("<<del-word-left>>", self.del_word_left)
text.bind("<<del-word-right>>", self.del_word_right)
text.bind("<<beginning-of-line>>", self.home_callback)
if flist:
flist.inversedict[self] = key
if key:
flist.dict[key] = self
text.bind("<<open-new-window>>", self.new_callback)
text.bind("<<close-all-windows>>", self.flist.close_all_callback)
text.bind("<<open-class-browser>>", self.open_module_browser)
text.bind("<<open-path-browser>>", self.open_path_browser)
text.bind("<<open-turtle-demo>>", self.open_turtle_demo)
self.set_status_bar()
text_frame.pack(side=LEFT, fill=BOTH, expand=1)
text_frame.rowconfigure(1, weight=1)
text_frame.columnconfigure(1, weight=1)
vbar['command'] = self.handle_yview
vbar.grid(row=1, column=2, sticky=NSEW)
text['yscrollcommand'] = vbar.set
text['font'] = idleConf.GetFont(self.root, 'main', 'EditorWindow')
text.grid(row=1, column=1, sticky=NSEW)
text.focus_set()
self.set_width()
# usetabs true -> literal tab characters are used by indent and
# dedent cmds, possibly mixed with spaces if
# indentwidth is not a multiple of tabwidth,
# which will cause Tabnanny to nag!
# false -> tab characters are converted to spaces by indent
# and dedent cmds, and ditto TAB keystrokes
# Although use-spaces=0 can be configured manually in config-main.def,
# configuration of tabs v. spaces is not supported in the configuration
# dialog. IDLE promotes the preferred Python indentation: use spaces!
usespaces = idleConf.GetOption('main', 'Indent',
'use-spaces', type='bool')
self.usetabs = not usespaces
# tabwidth is the display width of a literal tab character.
# CAUTION: telling Tk to use anything other than its default
# tab setting causes it to use an entirely different tabbing algorithm,
# treating tab stops as fixed distances from the left margin.
# Nobody expects this, so for now tabwidth should never be changed.
self.tabwidth = 8 # must remain 8 until Tk is fixed.
# indentwidth is the number of screen characters per indent level.
# The recommended Python indentation is four spaces.
self.indentwidth = self.tabwidth
self.set_notabs_indentwidth()
# When searching backwards for a reliable place to begin parsing,
# first start num_context_lines[0] lines back, then
# num_context_lines[1] lines back if that didn't work, and so on.
# The last value should be huge (larger than the # of lines in a
# conceivable file).
# Making the initial values larger slows things down more often.
self.num_context_lines = 50, 500, 5000000
self.per = per = self.Percolator(text)
self.undo = undo = self.UndoDelegator()
per.insertfilter(undo)
text.undo_block_start = undo.undo_block_start
text.undo_block_stop = undo.undo_block_stop
undo.set_saved_change_hook(self.saved_change_hook)
# IOBinding implements file I/O and printing functionality
self.io = io = self.IOBinding(self)
io.set_filename_change_hook(self.filename_change_hook)
self.good_load = False
self.set_indentation_params(False)
self.color = None # initialized below in self.ResetColorizer
self.code_context = None # optionally initialized later below
self.line_numbers = None # optionally initialized later below
if filename:
if os.path.exists(filename) and not os.path.isdir(filename):
if io.loadfile(filename):
self.good_load = True
is_py_src = self.ispythonsource(filename)
self.set_indentation_params(is_py_src)
else:
io.set_filename(filename)
self.good_load = True
self.ResetColorizer()
self.saved_change_hook()
self.update_recent_files_list()
self.load_extensions()
menu = self.menudict.get('window')
if menu:
end = menu.index("end")
if end is None:
end = -1
if end >= 0:
menu.add_separator()
end = end + 1
self.wmenu_end = end
window.register_callback(self.postwindowsmenu)
# Some abstractions so IDLE extensions are cross-IDE
self.askyesno = tkMessageBox.askyesno
self.askinteger = tkSimpleDialog.askinteger
self.showerror = tkMessageBox.showerror
# Add pseudoevents for former extension fixed keys.
# (This probably needs to be done once in the process.)
text.event_add('<<autocomplete>>', '<Key-Tab>')
text.event_add('<<try-open-completions>>', '<KeyRelease-period>',
'<KeyRelease-slash>', '<KeyRelease-backslash>')
text.event_add('<<try-open-calltip>>', '<KeyRelease-parenleft>')
text.event_add('<<refresh-calltip>>', '<KeyRelease-parenright>')
text.event_add('<<paren-closed>>', '<KeyRelease-parenright>',
'<KeyRelease-bracketright>', '<KeyRelease-braceright>')
# Former extension bindings depends on frame.text being packed
# (called from self.ResetColorizer()).
autocomplete = self.AutoComplete(self)
text.bind("<<autocomplete>>", autocomplete.autocomplete_event)
text.bind("<<try-open-completions>>",
autocomplete.try_open_completions_event)
text.bind("<<force-open-completions>>",
autocomplete.force_open_completions_event)
text.bind("<<expand-word>>", self.AutoExpand(self).expand_word_event)
text.bind("<<format-paragraph>>",
self.FormatParagraph(self).format_paragraph_event)
parenmatch = self.ParenMatch(self)
text.bind("<<flash-paren>>", parenmatch.flash_paren_event)
text.bind("<<paren-closed>>", parenmatch.paren_closed_event)
scriptbinding = ScriptBinding(self)
text.bind("<<check-module>>", scriptbinding.check_module_event)
text.bind("<<run-module>>", scriptbinding.run_module_event)
text.bind("<<run-custom>>", scriptbinding.run_custom_event)
text.bind("<<do-rstrip>>", self.Rstrip(self).do_rstrip)
ctip = self.Calltip(self)
text.bind("<<try-open-calltip>>", ctip.try_open_calltip_event)
#refresh-calltip must come after paren-closed to work right
text.bind("<<refresh-calltip>>", ctip.refresh_calltip_event)
text.bind("<<force-open-calltip>>", ctip.force_open_calltip_event)
text.bind("<<zoom-height>>", self.ZoomHeight(self).zoom_height_event)
if self.allow_code_context:
self.code_context = self.CodeContext(self)
text.bind("<<toggle-code-context>>",
self.code_context.toggle_code_context_event)
else:
self.update_menu_state('options', '*Code Context', 'disabled')
if self.allow_line_numbers:
self.line_numbers = self.LineNumbers(self)
if idleConf.GetOption('main', 'EditorWindow',
'line-numbers-default', type='bool'):
self.toggle_line_numbers_event()
text.bind("<<toggle-line-numbers>>", self.toggle_line_numbers_event)
else:
self.update_menu_state('options', '*Line Numbers', 'disabled')
def handle_winconfig(self, event=None):
self.set_width()
def set_width(self):
text = self.text
inner_padding = sum(map(text.tk.getint, [text.cget('border'),
text.cget('padx')]))
pixel_width = text.winfo_width() - 2 * inner_padding
# Divide the width of the Text widget by the font width,
# which is taken to be the width of '0' (zero).
# http://www.tcl.tk/man/tcl8.6/TkCmd/text.htm#M21
zero_char_width = \
Font(text, font=text.cget('font')).measure('0')
self.width = pixel_width // zero_char_width
def _filename_to_unicode(self, filename):
"""Return filename as BMP unicode so displayable in Tk."""
# Decode bytes to unicode.
if isinstance(filename, bytes):
try:
filename = filename.decode(self.filesystemencoding)
except UnicodeDecodeError:
try:
filename = filename.decode(self.encoding)
except UnicodeDecodeError:
# byte-to-byte conversion
filename = filename.decode('iso8859-1')
# Replace non-BMP char with diamond questionmark.
return re.sub('[\U00010000-\U0010FFFF]', '\ufffd', filename)
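    # Example (illustrative): an astral character such as '\U0001f40d' in a
    # file name becomes '\ufffd', which Tk can always render.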
def new_callback(self, event):
dirname, basename = self.io.defaultfilename()
self.flist.new(dirname)
return "break"
def home_callback(self, event):
if (event.state & 4) != 0 and event.keysym == "Home":
# state&4==Control. If <Control-Home>, use the Tk binding.
return None
if self.text.index("iomark") and \
self.text.compare("iomark", "<=", "insert lineend") and \
self.text.compare("insert linestart", "<=", "iomark"):
# In Shell on input line, go to just after prompt
insertpt = int(self.text.index("iomark").split(".")[1])
else:
line = self.text.get("insert linestart", "insert lineend")
for insertpt in range(len(line)):
if line[insertpt] not in (' ','\t'):
break
else:
insertpt=len(line)
lineat = int(self.text.index("insert").split('.')[1])
if insertpt == lineat:
insertpt = 0
dest = "insert linestart+"+str(insertpt)+"c"
if (event.state&1) == 0:
# shift was not pressed
self.text.tag_remove("sel", "1.0", "end")
else:
if not self.text.index("sel.first"):
# there was no previous selection
self.text.mark_set("my_anchor", "insert")
else:
if self.text.compare(self.text.index("sel.first"), "<",
self.text.index("insert")):
self.text.mark_set("my_anchor", "sel.first") # extend back
else:
self.text.mark_set("my_anchor", "sel.last") # extend forward
first = self.text.index(dest)
last = self.text.index("my_anchor")
if self.text.compare(first,">",last):
first,last = last,first
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", first, last)
self.text.mark_set("insert", dest)
self.text.see("insert")
return "break"
def set_status_bar(self):
self.status_bar = self.MultiStatusBar(self.top)
sep = Frame(self.top, height=1, borderwidth=1, background='grey75')
if sys.platform == "darwin":
# Insert some padding to avoid obscuring some of the statusbar
# by the resize widget.
self.status_bar.set_label('_padding1', ' ', side=RIGHT)
self.status_bar.set_label('column', 'Col: ?', side=RIGHT)
self.status_bar.set_label('line', 'Ln: ?', side=RIGHT)
self.status_bar.pack(side=BOTTOM, fill=X)
sep.pack(side=BOTTOM, fill=X)
self.text.bind("<<set-line-and-column>>", self.set_line_and_column)
self.text.event_add("<<set-line-and-column>>",
"<KeyRelease>", "<ButtonRelease>")
self.text.after_idle(self.set_line_and_column)
def set_line_and_column(self, event=None):
line, column = self.text.index(INSERT).split('.')
self.status_bar.set_label('column', 'Col: %s' % column)
self.status_bar.set_label('line', 'Ln: %s' % line)
menu_specs = [
("file", "_File"),
("edit", "_Edit"),
("format", "F_ormat"),
("run", "_Run"),
("options", "_Options"),
("window", "_Window"),
("help", "_Help"),
]
def createmenubar(self):
mbar = self.menubar
self.menudict = menudict = {}
for name, label in self.menu_specs:
underline, label = prepstr(label)
menudict[name] = menu = Menu(mbar, name=name, tearoff=0)
mbar.add_cascade(label=label, menu=menu, underline=underline)
if macosx.isCarbonTk():
# Insert the application menu
menudict['application'] = menu = Menu(mbar, name='apple',
tearoff=0)
mbar.add_cascade(label='IDLE', menu=menu)
self.fill_menus()
self.recent_files_menu = Menu(self.menubar, tearoff=0)
self.menudict['file'].insert_cascade(3, label='Recent Files',
underline=0,
menu=self.recent_files_menu)
self.base_helpmenu_length = self.menudict['help'].index(END)
self.reset_help_menu_entries()
def postwindowsmenu(self):
# Only called when Window menu exists
menu = self.menudict['window']
end = menu.index("end")
if end is None:
end = -1
if end > self.wmenu_end:
menu.delete(self.wmenu_end+1, end)
window.add_windows_to_menu(menu)
def update_menu_label(self, menu, index, label):
"Update label for menu item at index."
menuitem = self.menudict[menu]
menuitem.entryconfig(index, label=label)
def update_menu_state(self, menu, index, state):
"Update state for menu item at index."
menuitem = self.menudict[menu]
menuitem.entryconfig(index, state=state)
def handle_yview(self, event, *args):
"Handle scrollbar."
if event == 'moveto':
fraction = float(args[0])
lines = (round(self.getlineno('end') * fraction) -
self.getlineno('@0,0'))
event = 'scroll'
args = (lines, 'units')
self.text.yview(event, *args)
return 'break'
def mousescroll(self, event):
"""Handle scrollwheel event.
For wheel up, event.delta = 120*n on Windows, -1*n on darwin,
where n can be > 1 if one scrolls fast. Flicking the wheel
        generates up to maybe 20 events with n up to 10 or more.
        Macs use wheel down (delta = 1*n) to scroll up, so positive
        delta means to scroll up on both systems.
        X11 sends Button-4 and Button-5 events instead.
"""
up = {EventType.MouseWheel: event.delta > 0,
EventType.Button: event.num == 4}
lines = -5 if up[event.type] else 5
self.text.yview_scroll(lines, 'units')
return 'break'
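    # Example (illustrative): one wheel notch up on Windows arrives as
    # delta=120, so up[event.type] is True and the view scrolls 5 lines up.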
rmenu = None
def right_menu_event(self, event):
self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
if not self.rmenu:
self.make_rmenu()
rmenu = self.rmenu
self.event = event
iswin = sys.platform[:3] == 'win'
if iswin:
self.text.config(cursor="arrow")
for item in self.rmenu_specs:
try:
label, eventname, verify_state = item
except ValueError: # see issue1207589
continue
if verify_state is None:
continue
state = getattr(self, verify_state)()
rmenu.entryconfigure(label, state=state)
rmenu.tk_popup(event.x_root, event.y_root)
if iswin:
self.text.config(cursor="ibeam")
return "break"
rmenu_specs = [
# ("Label", "<<virtual-event>>", "statefuncname"), ...
("Close", "<<close-window>>", None), # Example
]
def make_rmenu(self):
rmenu = Menu(self.text, tearoff=0)
for item in self.rmenu_specs:
label, eventname = item[0], item[1]
if label is not None:
def command(text=self.text, eventname=eventname):
text.event_generate(eventname)
rmenu.add_command(label=label, command=command)
else:
rmenu.add_separator()
self.rmenu = rmenu
def rmenu_check_cut(self):
return self.rmenu_check_copy()
def rmenu_check_copy(self):
try:
indx = self.text.index('sel.first')
except TclError:
return 'disabled'
else:
return 'normal' if indx else 'disabled'
def rmenu_check_paste(self):
try:
self.text.tk.call('tk::GetSelection', self.text, 'CLIPBOARD')
except TclError:
return 'disabled'
else:
return 'normal'
def about_dialog(self, event=None):
"Handle Help 'About IDLE' event."
# Synchronize with macosx.overrideRootMenu.about_dialog.
help_about.AboutDialog(self.top)
return "break"
def config_dialog(self, event=None):
"Handle Options 'Configure IDLE' event."
# Synchronize with macosx.overrideRootMenu.config_dialog.
configdialog.ConfigDialog(self.top,'Settings')
return "break"
def help_dialog(self, event=None):
"Handle Help 'IDLE Help' event."
# Synchronize with macosx.overrideRootMenu.help_dialog.
if self.root:
parent = self.root
else:
parent = self.top
help.show_idlehelp(parent)
return "break"
def python_docs(self, event=None):
if sys.platform[:3] == 'win':
try:
os.startfile(self.help_url)
except OSError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(self.help_url)
return "break"
def cut(self,event):
self.text.event_generate("<<Cut>>")
return "break"
def copy(self,event):
if not self.text.tag_ranges("sel"):
# There is no selection, so do nothing and maybe interrupt.
return None
self.text.event_generate("<<Copy>>")
return "break"
def paste(self,event):
self.text.event_generate("<<Paste>>")
self.text.see("insert")
return "break"
def select_all(self, event=None):
self.text.tag_add("sel", "1.0", "end-1c")
self.text.mark_set("insert", "1.0")
self.text.see("insert")
return "break"
def remove_selection(self, event=None):
self.text.tag_remove("sel", "1.0", "end")
self.text.see("insert")
return "break"
def move_at_edge_if_selection(self, edge_index):
"""Cursor move begins at start or end of selection
        When a left/right cursor key is pressed, create and return to
        Tkinter a function which causes a cursor move from the associated
        edge of the selection.
"""
self_text_index = self.text.index
self_text_mark_set = self.text.mark_set
edges_table = ("sel.first+1c", "sel.last-1c")
def move_at_edge(event):
if (event.state & 5) == 0: # no shift(==1) or control(==4) pressed
try:
self_text_index("sel.first")
self_text_mark_set("insert", edges_table[edge_index])
except TclError:
pass
return move_at_edge
def del_word_left(self, event):
self.text.event_generate('<Meta-Delete>')
return "break"
def del_word_right(self, event):
self.text.event_generate('<Meta-d>')
return "break"
def find_event(self, event):
search.find(self.text)
return "break"
def find_again_event(self, event):
search.find_again(self.text)
return "break"
def find_selection_event(self, event):
search.find_selection(self.text)
return "break"
def find_in_files_event(self, event):
grep.grep(self.text, self.io, self.flist)
return "break"
def replace_event(self, event):
replace.replace(self.text)
return "break"
def goto_line_event(self, event):
text = self.text
lineno = tkSimpleDialog.askinteger("Goto",
"Go to line number:",parent=text)
if lineno is None:
return "break"
if lineno <= 0:
text.bell()
return "break"
text.mark_set("insert", "%d.0" % lineno)
text.see("insert")
return "break"
def open_module(self):
"""Get module name from user and open it.
        Return the module path, or None. Used by open_module_browser
        when the latter is not invoked in a named editor window.
"""
# XXX This, open_module_browser, and open_path_browser
# would fit better in iomenu.IOBinding.
try:
name = self.text.get("sel.first", "sel.last").strip()
except TclError:
name = ''
file_path = query.ModuleName(
self.text, "Open Module",
"Enter the name of a Python module\n"
"to search on sys.path and open:",
name).result
if file_path is not None:
if self.flist:
self.flist.open(file_path)
else:
self.io.loadfile(file_path)
return file_path
def open_module_event(self, event):
self.open_module()
return "break"
def open_module_browser(self, event=None):
filename = self.io.filename
if not (self.__class__.__name__ == 'PyShellEditorWindow'
and filename):
filename = self.open_module()
if filename is None:
return "break"
from idlelib import browser
browser.ModuleBrowser(self.root, filename)
return "break"
def open_path_browser(self, event=None):
from idlelib import pathbrowser
pathbrowser.PathBrowser(self.root)
return "break"
def open_turtle_demo(self, event = None):
import subprocess
cmd = [sys.executable,
'-c',
'from turtledemo.__main__ import main; main()']
subprocess.Popen(cmd, shell=False)
return "break"
def gotoline(self, lineno):
if lineno is not None and lineno > 0:
self.text.mark_set("insert", "%d.0" % lineno)
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", "insert", "insert +1l")
self.center()
def ispythonsource(self, filename):
if not filename or os.path.isdir(filename):
return True
base, ext = os.path.splitext(os.path.basename(filename))
if os.path.normcase(ext) in (".py", ".pyw"):
return True
line = self.text.get('1.0', '1.0 lineend')
return line.startswith('#!') and 'python' in line
def close_hook(self):
if self.flist:
self.flist.unregister_maybe_terminate(self)
self.flist = None
def set_close_hook(self, close_hook):
self.close_hook = close_hook
def filename_change_hook(self):
if self.flist:
self.flist.filename_changed_edit(self)
self.saved_change_hook()
self.top.update_windowlist_registry(self)
self.ResetColorizer()
def _addcolorizer(self):
if self.color:
return
if self.ispythonsource(self.io.filename):
self.color = self.ColorDelegator()
# can add more colorizers here...
if self.color:
self.per.removefilter(self.undo)
self.per.insertfilter(self.color)
self.per.insertfilter(self.undo)
def _rmcolorizer(self):
if not self.color:
return
self.color.removecolors()
self.per.removefilter(self.color)
self.color = None
def ResetColorizer(self):
"Update the color theme"
# Called from self.filename_change_hook and from configdialog.py
self._rmcolorizer()
self._addcolorizer()
EditorWindow.color_config(self.text)
if self.code_context is not None:
self.code_context.update_highlight_colors()
if self.line_numbers is not None:
self.line_numbers.update_colors()
IDENTCHARS = string.ascii_letters + string.digits + "_"
def colorize_syntax_error(self, text, pos):
text.tag_add("ERROR", pos)
char = text.get(pos)
if char and char in self.IDENTCHARS:
text.tag_add("ERROR", pos + " wordstart", pos)
if '\n' == text.get(pos): # error at line end
text.mark_set("insert", pos)
else:
text.mark_set("insert", pos + "+1c")
text.see(pos)
def ResetFont(self):
"Update the text widgets' font if it is changed"
# Called from configdialog.py
# Update the code context widget first, since its height affects
# the height of the text widget. This avoids double re-rendering.
if self.code_context is not None:
self.code_context.update_font()
# Next, update the line numbers widget, since its width affects
# the width of the text widget.
if self.line_numbers is not None:
self.line_numbers.update_font()
# Finally, update the main text widget.
new_font = idleConf.GetFont(self.root, 'main', 'EditorWindow')
self.text['font'] = new_font
self.set_width()
def RemoveKeybindings(self):
"Remove the keybindings before they are changed."
# Called from configdialog.py
self.mainmenu.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
for event, keylist in keydefs.items():
self.text.event_delete(event, *keylist)
for extensionName in self.get_standard_extension_names():
xkeydefs = idleConf.GetExtensionBindings(extensionName)
if xkeydefs:
for event, keylist in xkeydefs.items():
self.text.event_delete(event, *keylist)
def ApplyKeybindings(self):
"Update the keybindings after they are changed"
# Called from configdialog.py
self.mainmenu.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
self.apply_bindings()
for extensionName in self.get_standard_extension_names():
xkeydefs = idleConf.GetExtensionBindings(extensionName)
if xkeydefs:
self.apply_bindings(xkeydefs)
#update menu accelerators
menuEventDict = {}
for menu in self.mainmenu.menudefs:
menuEventDict[menu[0]] = {}
for item in menu[1]:
if item:
menuEventDict[menu[0]][prepstr(item[0])[1]] = item[1]
for menubarItem in self.menudict:
menu = self.menudict[menubarItem]
end = menu.index(END)
if end is None:
# Skip empty menus
continue
end += 1
for index in range(0, end):
if menu.type(index) == 'command':
accel = menu.entrycget(index, 'accelerator')
if accel:
itemName = menu.entrycget(index, 'label')
event = ''
if menubarItem in menuEventDict:
if itemName in menuEventDict[menubarItem]:
event = menuEventDict[menubarItem][itemName]
if event:
accel = get_accelerator(keydefs, event)
menu.entryconfig(index, accelerator=accel)
def set_notabs_indentwidth(self):
"Update the indentwidth if changed and not using tabs in this window"
# Called from configdialog.py
if not self.usetabs:
self.indentwidth = idleConf.GetOption('main', 'Indent','num-spaces',
type='int')
def reset_help_menu_entries(self):
"Update the additional help entries on the Help menu"
help_list = idleConf.GetAllExtraHelpSourcesList()
helpmenu = self.menudict['help']
# first delete the extra help entries, if any
helpmenu_length = helpmenu.index(END)
if helpmenu_length > self.base_helpmenu_length:
helpmenu.delete((self.base_helpmenu_length + 1), helpmenu_length)
# then rebuild them
if help_list:
helpmenu.add_separator()
for entry in help_list:
cmd = self.__extra_help_callback(entry[1])
helpmenu.add_command(label=entry[0], command=cmd)
# and update the menu dictionary
self.menudict['help'] = helpmenu
def __extra_help_callback(self, helpfile):
"Create a callback with the helpfile value frozen at definition time"
def display_extra_help(helpfile=helpfile):
if not helpfile.startswith(('www', 'http')):
helpfile = os.path.normpath(helpfile)
if sys.platform[:3] == 'win':
try:
os.startfile(helpfile)
except OSError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(helpfile)
return display_extra_help
def update_recent_files_list(self, new_file=None):
"Load and update the recent files list and menus"
rf_list = []
if os.path.exists(self.recent_files_path):
with open(self.recent_files_path, 'r',
encoding='utf_8', errors='replace') as rf_list_file:
rf_list = rf_list_file.readlines()
if new_file:
new_file = os.path.abspath(new_file) + '\n'
if new_file in rf_list:
rf_list.remove(new_file) # move to top
rf_list.insert(0, new_file)
# clean and save the recent files list
bad_paths = []
for path in rf_list:
if '\0' in path or not os.path.exists(path[0:-1]):
bad_paths.append(path)
rf_list = [path for path in rf_list if path not in bad_paths]
ulchars = "1234567890ABCDEFGHIJK"
rf_list = rf_list[0:len(ulchars)]
try:
with open(self.recent_files_path, 'w',
encoding='utf_8', errors='replace') as rf_file:
rf_file.writelines(rf_list)
except OSError as err:
if not getattr(self.root, "recentfilelist_error_displayed", False):
self.root.recentfilelist_error_displayed = True
tkMessageBox.showwarning(title='IDLE Warning',
message="Cannot update File menu Recent Files list. "
"Your operating system says:\n%s\n"
"Select OK and IDLE will continue without updating."
% self._filename_to_unicode(str(err)),
parent=self.text)
# for each edit window instance, construct the recent files menu
for instance in self.top.instance_dict:
menu = instance.recent_files_menu
menu.delete(0, END) # clear, and rebuild:
for i, file_name in enumerate(rf_list):
file_name = file_name.rstrip() # zap \n
# make unicode string to display non-ASCII chars correctly
ufile_name = self._filename_to_unicode(file_name)
callback = instance.__recent_file_callback(file_name)
menu.add_command(label=ulchars[i] + " " + ufile_name,
command=callback,
underline=0)
def __recent_file_callback(self, file_name):
def open_recent_file(fn_closure=file_name):
self.io.open(editFile=fn_closure)
return open_recent_file
def saved_change_hook(self):
short = self.short_title()
long = self.long_title()
if short and long:
title = short + " - " + long + _py_version
elif short:
title = short
elif long:
title = long
else:
title = "untitled"
icon = short or long or title
if not self.get_saved():
title = "*%s*" % title
icon = "*%s" % icon
self.top.wm_title(title)
self.top.wm_iconname(icon)
def get_saved(self):
return self.undo.get_saved()
def set_saved(self, flag):
self.undo.set_saved(flag)
def reset_undo(self):
self.undo.reset_undo()
def short_title(self):
filename = self.io.filename
if filename:
filename = os.path.basename(filename)
else:
filename = "untitled"
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(filename)
def long_title(self):
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(self.io.filename or "")
def center_insert_event(self, event):
self.center()
return "break"
def center(self, mark="insert"):
text = self.text
top, bot = self.getwindowlines()
lineno = self.getlineno(mark)
height = bot - top
newtop = max(1, lineno - height//2)
text.yview(float(newtop))
def getwindowlines(self):
text = self.text
top = self.getlineno("@0,0")
bot = self.getlineno("@0,65535")
if top == bot and text.winfo_height() == 1:
# Geometry manager hasn't run yet
height = int(text['height'])
bot = top + height - 1
return top, bot
def getlineno(self, mark="insert"):
text = self.text
return int(float(text.index(mark)))
def get_geometry(self):
"Return (width, height, x, y)"
geom = self.top.wm_geometry()
m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
return list(map(int, m.groups()))
def close_event(self, event):
self.close()
return "break"
def maybesave(self):
if self.io:
if not self.get_saved():
if self.top.state()!='normal':
self.top.deiconify()
self.top.lower()
self.top.lift()
return self.io.maybesave()
def close(self):
reply = self.maybesave()
if str(reply) != "cancel":
self._close()
return reply
def _close(self):
if self.io.filename:
self.update_recent_files_list(new_file=self.io.filename)
window.unregister_callback(self.postwindowsmenu)
self.unload_extensions()
self.io.close()
self.io = None
self.undo = None
if self.color:
self.color.close()
self.color = None
self.text = None
self.tkinter_vars = None
self.per.close()
self.per = None
self.top.destroy()
if self.close_hook:
# unless override: unregister from flist, terminate if last window
self.close_hook()
def load_extensions(self):
self.extensions = {}
self.load_standard_extensions()
def unload_extensions(self):
for ins in list(self.extensions.values()):
if hasattr(ins, "close"):
ins.close()
self.extensions = {}
def load_standard_extensions(self):
for name in self.get_standard_extension_names():
try:
self.load_extension(name)
except:
print("Failed to load extension", repr(name))
traceback.print_exc()
def get_standard_extension_names(self):
return idleConf.GetExtensions(editor_only=True)
extfiles = { # Map built-in config-extension section names to file names.
'ZzDummy': 'zzdummy',
}
def load_extension(self, name):
fname = self.extfiles.get(name, name)
try:
try:
mod = importlib.import_module('.' + fname, package=__package__)
except (ImportError, TypeError):
mod = importlib.import_module(fname)
except ImportError:
print("\nFailed to import extension: ", name)
raise
cls = getattr(mod, name)
keydefs = idleConf.GetExtensionBindings(name)
if hasattr(cls, "menudefs"):
self.fill_menus(cls.menudefs, keydefs)
ins = cls(self)
self.extensions[name] = ins
if keydefs:
self.apply_bindings(keydefs)
for vevent in keydefs:
methodname = vevent.replace("-", "_")
while methodname[:1] == '<':
methodname = methodname[1:]
while methodname[-1:] == '>':
methodname = methodname[:-1]
methodname = methodname + "_event"
if hasattr(ins, methodname):
self.text.bind(vevent, getattr(ins, methodname))
def apply_bindings(self, keydefs=None):
if keydefs is None:
keydefs = self.mainmenu.default_keydefs
text = self.text
text.keydefs = keydefs
for event, keylist in keydefs.items():
if keylist:
text.event_add(event, *keylist)
def fill_menus(self, menudefs=None, keydefs=None):
"""Add appropriate entries to the menus and submenus
Menus that are absent or None in self.menudict are ignored.
"""
if menudefs is None:
menudefs = self.mainmenu.menudefs
if keydefs is None:
keydefs = self.mainmenu.default_keydefs
menudict = self.menudict
text = self.text
for mname, entrylist in menudefs:
menu = menudict.get(mname)
if not menu:
continue
for entry in entrylist:
if not entry:
menu.add_separator()
else:
label, eventname = entry
checkbutton = (label[:1] == '!')
if checkbutton:
label = label[1:]
underline, label = prepstr(label)
accelerator = get_accelerator(keydefs, eventname)
def command(text=text, eventname=eventname):
text.event_generate(eventname)
if checkbutton:
var = self.get_var_obj(eventname, BooleanVar)
menu.add_checkbutton(label=label, underline=underline,
command=command, accelerator=accelerator,
variable=var)
else:
menu.add_command(label=label, underline=underline,
command=command,
accelerator=accelerator)
def getvar(self, name):
var = self.get_var_obj(name)
if var:
value = var.get()
return value
else:
raise NameError(name)
def setvar(self, name, value, vartype=None):
var = self.get_var_obj(name, vartype)
if var:
var.set(value)
else:
raise NameError(name)
def get_var_obj(self, name, vartype=None):
var = self.tkinter_vars.get(name)
if not var and vartype:
# create a Tkinter variable object with self.text as master:
self.tkinter_vars[name] = var = vartype(self.text)
return var
# Tk implementations of "virtual text methods" -- each platform
# reusing IDLE's support code needs to define these for its GUI's
# flavor of widget.
# Is character at text_index in a Python string? Return 0 for
# "guaranteed no", true for anything else. This info is expensive
# to compute ab initio, but is probably already known by the
# platform's colorizer.
def is_char_in_string(self, text_index):
if self.color:
# Return true iff colorizer hasn't (re)gotten this far
# yet, or the character is tagged as being in a string
return self.text.tag_prevrange("TODO", text_index) or \
"STRING" in self.text.tag_names(text_index)
else:
# The colorizer is missing: assume the worst
return 1
# If a selection is defined in the text widget, return (start,
# end) as Tkinter text indices, otherwise return (None, None)
def get_selection_indices(self):
try:
first = self.text.index("sel.first")
last = self.text.index("sel.last")
return first, last
except TclError:
return None, None
# Return the text widget's current view of what a tab stop means
# (equivalent width in spaces).
def get_tk_tabwidth(self):
current = self.text['tabs'] or TK_TABWIDTH_DEFAULT
return int(current)
# Set the text widget's current view of what a tab stop means.
def set_tk_tabwidth(self, newtabwidth):
text = self.text
if self.get_tk_tabwidth() != newtabwidth:
# Set text widget tab width
pixels = text.tk.call("font", "measure", text["font"],
"-displayof", text.master,
"n" * newtabwidth)
text.configure(tabs=pixels)
### begin autoindent code ### (configuration was moved to beginning of class)
def set_indentation_params(self, is_py_src, guess=True):
if is_py_src and guess:
i = self.guess_indent()
if 2 <= i <= 8:
self.indentwidth = i
if self.indentwidth != self.tabwidth:
self.usetabs = False
self.set_tk_tabwidth(self.tabwidth)
def smart_backspace_event(self, event):
text = self.text
first, last = self.get_selection_indices()
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
return "break"
# Delete whitespace left, until hitting a real char or closest
# preceding virtual tab stop.
chars = text.get("insert linestart", "insert")
if chars == '':
if text.compare("insert", ">", "1.0"):
# easy: delete preceding newline
text.delete("insert-1c")
else:
text.bell() # at start of buffer
return "break"
if chars[-1] not in " \t":
# easy: delete preceding real char
text.delete("insert-1c")
return "break"
# Ick. It may require *inserting* spaces if we back up over a
# tab character! This is written to be clear, not fast.
tabwidth = self.tabwidth
have = len(chars.expandtabs(tabwidth))
assert have > 0
want = ((have - 1) // self.indentwidth) * self.indentwidth
# Debug prompt is multilined....
ncharsdeleted = 0
while 1:
if chars == self.prompt_last_line: # '' unless PyShell
break
chars = chars[:-1]
ncharsdeleted = ncharsdeleted + 1
have = len(chars.expandtabs(tabwidth))
if have <= want or chars[-1] not in " \t":
break
text.undo_block_start()
text.delete("insert-%dc" % ncharsdeleted, "insert")
if have < want:
text.insert("insert", ' ' * (want - have))
text.undo_block_stop()
return "break"
def smart_indent_event(self, event):
# if intraline selection:
# delete it
# elif multiline selection:
# do indent-region
# else:
# indent one level
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
if index2line(first) != index2line(last):
return self.fregion.indent_region_event(event)
text.delete(first, last)
text.mark_set("insert", first)
prefix = text.get("insert linestart", "insert")
raw, effective = get_line_indent(prefix, self.tabwidth)
if raw == len(prefix):
# only whitespace to the left
self.reindent_to(effective + self.indentwidth)
else:
# tab to the next 'stop' within or to right of line's text:
if self.usetabs:
pad = '\t'
else:
effective = len(prefix.expandtabs(self.tabwidth))
n = self.indentwidth
pad = ' ' * (n - effective % n)
text.insert("insert", pad)
text.see("insert")
return "break"
finally:
text.undo_block_stop()
def newline_and_indent_event(self, event):
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
line = text.get("insert linestart", "insert")
i, n = 0, len(line)
while i < n and line[i] in " \t":
i = i+1
if i == n:
# the cursor is in or at leading indentation in a continuation
# line; just inject an empty line at the start
text.insert("insert linestart", '\n')
return "break"
indent = line[:i]
# strip whitespace before insert point unless it's in the prompt
i = 0
while line and line[-1] in " \t" and line != self.prompt_last_line:
line = line[:-1]
i = i+1
if i:
text.delete("insert - %d chars" % i, "insert")
# strip whitespace after insert point
while text.get("insert") in " \t":
text.delete("insert")
# start new line
text.insert("insert", '\n')
# adjust indentation for continuations and block
# open/close first need to find the last stmt
lno = index2line(text.index('insert'))
y = pyparse.Parser(self.indentwidth, self.tabwidth)
if not self.prompt_last_line:
for context in self.num_context_lines:
startat = max(lno - context, 1)
startatindex = repr(startat) + ".0"
rawtext = text.get(startatindex, "insert")
y.set_code(rawtext)
bod = y.find_good_parse_start(
self._build_char_in_string_func(startatindex))
if bod is not None or startat == 1:
break
y.set_lo(bod or 0)
else:
r = text.tag_prevrange("console", "insert")
if r:
startatindex = r[1]
else:
startatindex = "1.0"
rawtext = text.get(startatindex, "insert")
y.set_code(rawtext)
y.set_lo(0)
c = y.get_continuation_type()
if c != pyparse.C_NONE:
# The current stmt hasn't ended yet.
if c == pyparse.C_STRING_FIRST_LINE:
# after the first line of a string; do not indent at all
pass
elif c == pyparse.C_STRING_NEXT_LINES:
# inside a string which started before this line;
# just mimic the current indent
text.insert("insert", indent)
elif c == pyparse.C_BRACKET:
# line up with the first (if any) element of the
# last open bracket structure; else indent one
# level beyond the indent of the line with the
# last open bracket
self.reindent_to(y.compute_bracket_indent())
elif c == pyparse.C_BACKSLASH:
# if more than one line in this stmt already, just
# mimic the current indent; else if initial line
# has a start on an assignment stmt, indent to
# beyond leftmost =; else to beyond first chunk of
# non-whitespace on initial line
if y.get_num_lines_in_stmt() > 1:
text.insert("insert", indent)
else:
self.reindent_to(y.compute_backslash_indent())
else:
assert 0, "bogus continuation type %r" % (c,)
return "break"
# This line starts a brand new stmt; indent relative to
# indentation of initial line of closest preceding
# interesting stmt.
indent = y.get_base_indent_string()
text.insert("insert", indent)
if y.is_block_opener():
self.smart_indent_event(event)
elif indent and y.is_block_closer():
self.smart_backspace_event(event)
return "break"
finally:
text.see("insert")
text.undo_block_stop()
# Our editwin provides an is_char_in_string function that works
# with a Tk text index, but PyParse only knows about offsets into
# a string. This builds a function for PyParse that accepts an
# offset.
def _build_char_in_string_func(self, startindex):
def inner(offset, _startindex=startindex,
_icis=self.is_char_in_string):
return _icis(_startindex + "+%dc" % offset)
return inner
# XXX this isn't bound to anything -- see tabwidth comments
## def change_tabwidth_event(self, event):
## new = self._asktabwidth()
## if new != self.tabwidth:
## self.tabwidth = new
## self.set_indentation_params(0, guess=0)
## return "break"
# Make string that displays as n leading blanks.
def _make_blanks(self, n):
if self.usetabs:
ntabs, nspaces = divmod(n, self.tabwidth)
return '\t' * ntabs + ' ' * nspaces
else:
return ' ' * n
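    # Example (illustrative): with tabwidth=8 and usetabs=True,
    # _make_blanks(10) returns '\t  ' -- one tab plus two spaces.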
    # Delete from the beginning of the line to the insert point, then
    # reinsert whitespace displaying as `column` blanks (tabs if appropriate).
def reindent_to(self, column):
text = self.text
text.undo_block_start()
if text.compare("insert linestart", "!=", "insert"):
text.delete("insert linestart", "insert")
if column:
text.insert("insert", self._make_blanks(column))
text.undo_block_stop()
# Guess indentwidth from text content.
# Return guessed indentwidth. This should not be believed unless
# it's in a reasonable range (e.g., it will be 0 if no indented
# blocks are found).
def guess_indent(self):
opener, indented = IndentSearcher(self.text, self.tabwidth).run()
if opener and indented:
raw, indentsmall = get_line_indent(opener, self.tabwidth)
raw, indentlarge = get_line_indent(indented, self.tabwidth)
else:
indentsmall = indentlarge = 0
return indentlarge - indentsmall
def toggle_line_numbers_event(self, event=None):
if self.line_numbers is None:
return
if self.line_numbers.is_shown:
self.line_numbers.hide_sidebar()
menu_label = "Show"
else:
self.line_numbers.show_sidebar()
menu_label = "Hide"
self.update_menu_label(menu='options', index='*Line Numbers',
label=f'{menu_label} Line Numbers')
# "line.col" -> line, as an int
def index2line(index):
return int(float(index))
_line_indent_re = re.compile(r'[ \t]*')
def get_line_indent(line, tabwidth):
"""Return a line's indentation as (# chars, effective # of spaces).
The effective # of spaces is the length after properly "expanding"
the tabs into spaces, as done by str.expandtabs(tabwidth).
"""
m = _line_indent_re.match(line)
return m.end(), len(m.group().expandtabs(tabwidth))
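# Example (illustrative): get_line_indent("\tif x:", 8) returns (1, 8) --
# one whitespace character expanding to eight columns.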
class IndentSearcher(object):
# .run() chews over the Text widget, looking for a block opener
# and the stmt following it. Returns a pair,
# (line containing block opener, line containing stmt)
# Either or both may be None.
def __init__(self, text, tabwidth):
self.text = text
self.tabwidth = tabwidth
self.i = self.finished = 0
self.blkopenline = self.indentedline = None
def readline(self):
if self.finished:
return ""
i = self.i = self.i + 1
mark = repr(i) + ".0"
if self.text.compare(mark, ">=", "end"):
return ""
return self.text.get(mark, mark + " lineend+1c")
def tokeneater(self, type, token, start, end, line,
INDENT=tokenize.INDENT,
NAME=tokenize.NAME,
OPENERS=('class', 'def', 'for', 'if', 'try', 'while')):
if self.finished:
pass
elif type == NAME and token in OPENERS:
self.blkopenline = line
elif type == INDENT and self.blkopenline:
self.indentedline = line
self.finished = 1
def run(self):
save_tabsize = tokenize.tabsize
tokenize.tabsize = self.tabwidth
try:
try:
tokens = tokenize.generate_tokens(self.readline)
for token in tokens:
self.tokeneater(*token)
except (tokenize.TokenError, SyntaxError):
# since we cut off the tokenizer early, we can trigger
# spurious errors
pass
finally:
tokenize.tabsize = save_tabsize
return self.blkopenline, self.indentedline
### end autoindent code ###
def prepstr(s):
# Helper to extract the underscore from a string, e.g.
# prepstr("Co_py") returns (2, "Copy").
i = s.find('_')
if i >= 0:
s = s[:i] + s[i+1:]
return i, s
keynames = {
'bracketleft': '[',
'bracketright': ']',
'slash': '/',
}
def get_accelerator(keydefs, eventname):
keylist = keydefs.get(eventname)
# issue10940: temporary workaround to prevent hang with OS X Cocoa Tk 8.5
# if not keylist:
if (not keylist) or (macosx.isCocoaTk() and eventname in {
"<<open-module>>",
"<<goto-line>>",
"<<change-indentwidth>>"}):
return ""
s = keylist[0]
s = re.sub(r"-[a-z]\b", lambda m: m.group().upper(), s)
s = re.sub(r"\b\w+\b", lambda m: keynames.get(m.group(), m.group()), s)
s = re.sub("Key-", "", s)
s = re.sub("Cancel","Ctrl-Break",s) # dscherer@cmu.edu
s = re.sub("Control-", "Ctrl-", s)
s = re.sub("-", "+", s)
s = re.sub("><", " ", s)
s = re.sub("<", "", s)
s = re.sub(">", "", s)
return s
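# Worked example (illustrative, not in the original source): given
# keydefs = {'<<copy>>': ['<Control-Key-c>']}, the substitutions above turn
# get_accelerator(keydefs, '<<copy>>') into the menu label 'Ctrl+C'.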
def fixwordbreaks(root):
# On Windows, tcl/tk breaks 'words' only on spaces, as in Command Prompt.
# We want Motif style everywhere. See #21474, msg218992 and followup.
tk = root.tk
tk.call('tcl_wordBreakAfter', 'a b', 0) # make sure word.tcl is loaded
tk.call('set', 'tcl_wordchars', r'\w')
tk.call('set', 'tcl_nonwordchars', r'\W')
def _editor_window(parent): # htest #
# error if close master window first - timer event, after script
root = parent
fixwordbreaks(root)
if sys.argv[1:]:
filename = sys.argv[1]
else:
filename = None
macosx.setupApp(root, None)
edit = EditorWindow(root=root, filename=filename)
text = edit.text
text['height'] = 10
for i in range(20):
text.insert('insert', ' '*i + str(i) + '\n')
# text.bind("<<close-all-windows>>", edit.close_event)
# Does not stop error, neither does following
# edit.text.bind("<<close-window>>", edit.close_event)
if __name__ == '__main__':
from unittest import main
main('idlelib.idle_test.test_editor', verbosity=2, exit=False)
from idlelib.idle_test.htest import run
run(_editor_window)
| 39.394595 | 95 | 0.57507 |
793ed41fe0e68f364daedabb91d61b5861fc77ee | 136 | py | Python | src/utils/common.py | phamnam-mta/know-life | f7c226c41e315f21b5d7fe2ccbc9ec4f9961ed1d | [
"MIT"
] | null | null | null | src/utils/common.py | phamnam-mta/know-life | f7c226c41e315f21b5d7fe2ccbc9ec4f9961ed1d | [
"MIT"
] | null | null | null | src/utils/common.py | phamnam-mta/know-life | f7c226c41e315f21b5d7fe2ccbc9ec4f9961ed1d | [
"MIT"
] | null | null | null | from typing import Text
def is_float(text: Text):
try:
        float(text)  # discard the value; only whether conversion succeeds matters
        return True
    except (TypeError, ValueError):
return False | 17 | 25 | 0.580882 |
793ed4662e5840e770d3dfd52f2992a28533984d | 457 | py | Python | Python/Algorithm/check_even.py | LilyYC/legendary-train | 214525afeeb2da2409f451bf269e792c6940a1ba | [
"MIT"
] | null | null | null | Python/Algorithm/check_even.py | LilyYC/legendary-train | 214525afeeb2da2409f451bf269e792c6940a1ba | [
"MIT"
] | null | null | null | Python/Algorithm/check_even.py | LilyYC/legendary-train | 214525afeeb2da2409f451bf269e792c6940a1ba | [
"MIT"
] | null | null | null | def only_evens(lst):
""" (list of list of int) -> list of list of int
# write your code here (be sure to read above for a suggested approach)
Return a list of the lists in lst that contain only even integers.
>>> only_evens([[1, 2, 4], [4, 0, 6], [22, 4, 3], [2]])
[[4, 0, 6], [2]]
"""
even_lists = []
for sublist in lst:
        if all(n % 2 == 0 for n in sublist):  # keep sublists whose elements are all even
even_lists.append(sublist)
return even_lists | 30.466667 | 72 | 0.582057 |
793ed496c77af4068520ca1fdfff7e4553c4eaad | 45,046 | py | Python | Lib/ssl.py | Antony-Wish/cpython | e1f95e77e0647aff602e0660ba3c282b71045875 | [
"CNRI-Python-GPL-Compatible"
] | 2 | 2018-10-21T15:09:29.000Z | 2018-11-12T09:30:15.000Z | Lib/ssl.py | Antony-Wish/cpython | e1f95e77e0647aff602e0660ba3c282b71045875 | [
"CNRI-Python-GPL-Compatible"
] | 1 | 2021-06-25T15:32:08.000Z | 2021-06-25T15:32:08.000Z | Lib/ssl.py | Antony-Wish/cpython | e1f95e77e0647aff602e0660ba3c282b71045875 | [
"CNRI-Python-GPL-Compatible"
] | null | null | null | # Wrapper module for _ssl, providing some additional facilities
# implemented in Python. Written by Bill Janssen.
"""This module provides some more Pythonic support for SSL.
Object types:
SSLSocket -- subtype of socket.socket which does SSL over the socket
Exceptions:
SSLError -- exception raised for I/O errors
Functions:
cert_time_to_seconds -- convert time string used for certificate
notBefore and notAfter functions to integer
seconds past the Epoch (the time values
returned from time.time())
  get_server_certificate (addr) -- fetch the certificate provided
                          by the server at addr, a (host, port) pair;
                          validated only when a CA file is supplied.
Integer constants:
SSL_ERROR_ZERO_RETURN
SSL_ERROR_WANT_READ
SSL_ERROR_WANT_WRITE
SSL_ERROR_WANT_X509_LOOKUP
SSL_ERROR_SYSCALL
SSL_ERROR_SSL
SSL_ERROR_WANT_CONNECT
SSL_ERROR_EOF
SSL_ERROR_INVALID_ERROR_CODE
The following group defines certificate requirements that one side is
allowing/requiring from the other side:
CERT_NONE - no certificates from the other side are required (or will
be looked at if provided)
CERT_OPTIONAL - certificates are not required, but if provided will be
validated, and if validation fails, the connection will
also fail
CERT_REQUIRED - certificates are required, and will be validated, and
if validation fails, the connection will also fail
The following constants identify various SSL protocol variants:
PROTOCOL_SSLv2
PROTOCOL_SSLv3
PROTOCOL_SSLv23
PROTOCOL_TLS
PROTOCOL_TLS_CLIENT
PROTOCOL_TLS_SERVER
PROTOCOL_TLSv1
PROTOCOL_TLSv1_1
PROTOCOL_TLSv1_2
The following constants identify various SSL alert message descriptions as per
http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6
ALERT_DESCRIPTION_CLOSE_NOTIFY
ALERT_DESCRIPTION_UNEXPECTED_MESSAGE
ALERT_DESCRIPTION_BAD_RECORD_MAC
ALERT_DESCRIPTION_RECORD_OVERFLOW
ALERT_DESCRIPTION_DECOMPRESSION_FAILURE
ALERT_DESCRIPTION_HANDSHAKE_FAILURE
ALERT_DESCRIPTION_BAD_CERTIFICATE
ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE
ALERT_DESCRIPTION_CERTIFICATE_REVOKED
ALERT_DESCRIPTION_CERTIFICATE_EXPIRED
ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN
ALERT_DESCRIPTION_ILLEGAL_PARAMETER
ALERT_DESCRIPTION_UNKNOWN_CA
ALERT_DESCRIPTION_ACCESS_DENIED
ALERT_DESCRIPTION_DECODE_ERROR
ALERT_DESCRIPTION_DECRYPT_ERROR
ALERT_DESCRIPTION_PROTOCOL_VERSION
ALERT_DESCRIPTION_INSUFFICIENT_SECURITY
ALERT_DESCRIPTION_INTERNAL_ERROR
ALERT_DESCRIPTION_USER_CANCELLED
ALERT_DESCRIPTION_NO_RENEGOTIATION
ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION
ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE
ALERT_DESCRIPTION_UNRECOGNIZED_NAME
ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE
ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE
ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY
"""
import sys
import os
from collections import namedtuple
from enum import Enum as _Enum, IntEnum as _IntEnum, IntFlag as _IntFlag
import _ssl # if we can't import it, let the error propagate
from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
from _ssl import _SSLContext, MemoryBIO, SSLSession
from _ssl import (
SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
SSLSyscallError, SSLEOFError, SSLCertVerificationError
)
from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
try:
from _ssl import RAND_egd
except ImportError:
# LibreSSL does not provide RAND_egd
pass
from _ssl import (
HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN, HAS_SSLv2, HAS_SSLv3, HAS_TLSv1,
HAS_TLSv1_1, HAS_TLSv1_2, HAS_TLSv1_3
)
from _ssl import _DEFAULT_CIPHERS, _OPENSSL_API_VERSION
_IntEnum._convert_(
'_SSLMethod', __name__,
lambda name: name.startswith('PROTOCOL_') and name != 'PROTOCOL_SSLv23',
source=_ssl)
_IntFlag._convert_(
'Options', __name__,
lambda name: name.startswith('OP_'),
source=_ssl)
_IntEnum._convert_(
'AlertDescription', __name__,
lambda name: name.startswith('ALERT_DESCRIPTION_'),
source=_ssl)
_IntEnum._convert_(
'SSLErrorNumber', __name__,
lambda name: name.startswith('SSL_ERROR_'),
source=_ssl)
_IntFlag._convert_(
'VerifyFlags', __name__,
lambda name: name.startswith('VERIFY_'),
source=_ssl)
_IntEnum._convert_(
'VerifyMode', __name__,
lambda name: name.startswith('CERT_'),
source=_ssl)
PROTOCOL_SSLv23 = _SSLMethod.PROTOCOL_SSLv23 = _SSLMethod.PROTOCOL_TLS
_PROTOCOL_NAMES = {value: name for name, value in _SSLMethod.__members__.items()}
_SSLv2_IF_EXISTS = getattr(_SSLMethod, 'PROTOCOL_SSLv2', None)
class TLSVersion(_IntEnum):
MINIMUM_SUPPORTED = _ssl.PROTO_MINIMUM_SUPPORTED
SSLv3 = _ssl.PROTO_SSLv3
TLSv1 = _ssl.PROTO_TLSv1
TLSv1_1 = _ssl.PROTO_TLSv1_1
TLSv1_2 = _ssl.PROTO_TLSv1_2
TLSv1_3 = _ssl.PROTO_TLSv1_3
MAXIMUM_SUPPORTED = _ssl.PROTO_MAXIMUM_SUPPORTED
if sys.platform == "win32":
from _ssl import enum_certificates, enum_crls
from socket import socket, AF_INET, SOCK_STREAM, create_connection
from socket import SOL_SOCKET, SO_TYPE
import socket as _socket
import base64 # for DER-to-PEM translation
import errno
import warnings
socket_error = OSError # keep that public name in module namespace
CHANNEL_BINDING_TYPES = ['tls-unique']
HAS_NEVER_CHECK_COMMON_NAME = hasattr(_ssl, 'HOSTFLAG_NEVER_CHECK_SUBJECT')
_RESTRICTED_SERVER_CIPHERS = _DEFAULT_CIPHERS
CertificateError = SSLCertVerificationError
def _dnsname_match(dn, hostname):
"""Matching according to RFC 6125, section 6.4.3
- Hostnames are compared lower case.
- For IDNA, both dn and hostname must be encoded as IDN A-label (ACE).
- Partial wildcards like 'www*.example.org', multiple wildcards, sole
wildcard or wildcards in labels other then the left-most label are not
supported and a CertificateError is raised.
- A wildcard must match at least one character.
"""
if not dn:
return False
wildcards = dn.count('*')
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
if wildcards > 1:
raise CertificateError(
"too many wildcards in certificate DNS name: {!r}.".format(dn))
dn_leftmost, sep, dn_remainder = dn.partition('.')
if '*' in dn_remainder:
# Only match wildcard in leftmost segment.
raise CertificateError(
"wildcard can only be present in the leftmost label: "
"{!r}.".format(dn))
if not sep:
# no right side
raise CertificateError(
"sole wildcard without additional labels are not support: "
"{!r}.".format(dn))
if dn_leftmost != '*':
# no partial wildcard matching
raise CertificateError(
"partial wildcards in leftmost label are not supported: "
"{!r}.".format(dn))
hostname_leftmost, sep, hostname_remainder = hostname.partition('.')
if not hostname_leftmost or not sep:
# wildcard must match at least one char
return False
return dn_remainder.lower() == hostname_remainder.lower()
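# Illustrative behaviour (not part of the original module):
#   _dnsname_match('*.example.org', 'www.example.org') -> True
#   _dnsname_match('*.example.org', 'example.org')     -> False, because the
#   wildcard must match at least one character.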
def _inet_paton(ipname):
"""Try to convert an IP address to packed binary form
Supports IPv4 addresses on all platforms and IPv6 on platforms with IPv6
support.
"""
# inet_aton() also accepts strings like '1'
if ipname.count('.') == 3:
try:
return _socket.inet_aton(ipname)
except OSError:
pass
try:
return _socket.inet_pton(_socket.AF_INET6, ipname)
except OSError:
raise ValueError("{!r} is neither an IPv4 nor an IP6 "
"address.".format(ipname))
except AttributeError:
# AF_INET6 not available
pass
raise ValueError("{!r} is not an IPv4 address.".format(ipname))
def _ipaddress_match(ipname, host_ip):
"""Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
"""
# OpenSSL may add a trailing newline to a subjectAltName's IP address
ip = _inet_paton(ipname.rstrip())
return ip == host_ip
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed.
The function matches IP addresses rather than dNSNames if hostname is a
valid ipaddress string. IPv4 addresses are supported on all platforms.
IPv6 addresses are supported on platforms with IPv6 support (AF_INET6
and inet_pton).
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate, match_hostname needs a "
"SSL socket or SSL context with either "
"CERT_OPTIONAL or CERT_REQUIRED")
try:
host_ip = _inet_paton(hostname)
except ValueError:
# Not an IP address (common case)
host_ip = None
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if host_ip is None and _dnsname_match(value, hostname):
return
dnsnames.append(value)
elif key == 'IP Address':
if host_ip is not None and _ipaddress_match(value, host_ip):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
DefaultVerifyPaths = namedtuple("DefaultVerifyPaths",
"cafile capath openssl_cafile_env openssl_cafile openssl_capath_env "
"openssl_capath")
def get_default_verify_paths():
"""Return paths to default cafile and capath.
"""
parts = _ssl.get_default_verify_paths()
# environment vars shadow paths
cafile = os.environ.get(parts[0], parts[1])
capath = os.environ.get(parts[2], parts[3])
return DefaultVerifyPaths(cafile if os.path.isfile(cafile) else None,
capath if os.path.isdir(capath) else None,
*parts)
class _ASN1Object(namedtuple("_ASN1Object", "nid shortname longname oid")):
"""ASN.1 object identifier lookup
"""
__slots__ = ()
def __new__(cls, oid):
return super().__new__(cls, *_txt2obj(oid, name=False))
@classmethod
def fromnid(cls, nid):
"""Create _ASN1Object from OpenSSL numeric ID
"""
return super().__new__(cls, *_nid2obj(nid))
@classmethod
def fromname(cls, name):
"""Create _ASN1Object from short name, long name or OID
"""
return super().__new__(cls, *_txt2obj(name, name=True))
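# Example (illustrative): _ASN1Object('1.3.6.1.5.5.7.3.1') looks up the
# serverAuth extended-key-usage OID, so its .shortname is 'serverAuth'.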
class Purpose(_ASN1Object, _Enum):
"""SSLContext purpose flags with X509v3 Extended Key Usage objects
"""
SERVER_AUTH = '1.3.6.1.5.5.7.3.1'
CLIENT_AUTH = '1.3.6.1.5.5.7.3.2'
class SSLContext(_SSLContext):
"""An SSLContext holds various SSL-related configuration options and
data, such as certificates and possibly a private key."""
_windows_cert_stores = ("CA", "ROOT")
sslsocket_class = None # SSLSocket is assigned later.
sslobject_class = None # SSLObject is assigned later.
def __new__(cls, protocol=PROTOCOL_TLS, *args, **kwargs):
self = _SSLContext.__new__(cls, protocol)
return self
def _encode_hostname(self, hostname):
if hostname is None:
return None
elif isinstance(hostname, str):
return hostname.encode('idna').decode('ascii')
else:
return hostname.decode('ascii')
def wrap_socket(self, sock, server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None, session=None):
# SSLSocket class handles server_hostname encoding before it calls
# ctx._wrap_socket()
return self.sslsocket_class._create(
sock=sock,
server_side=server_side,
do_handshake_on_connect=do_handshake_on_connect,
suppress_ragged_eofs=suppress_ragged_eofs,
server_hostname=server_hostname,
context=self,
session=session
)
def wrap_bio(self, incoming, outgoing, server_side=False,
server_hostname=None, session=None):
# Need to encode server_hostname here because _wrap_bio() can only
# handle ASCII str.
return self.sslobject_class._create(
incoming, outgoing, server_side=server_side,
server_hostname=self._encode_hostname(server_hostname),
session=session, context=self,
)
def set_npn_protocols(self, npn_protocols):
protos = bytearray()
for protocol in npn_protocols:
b = bytes(protocol, 'ascii')
if len(b) == 0 or len(b) > 255:
raise SSLError('NPN protocols must be 1 to 255 in length')
protos.append(len(b))
protos.extend(b)
self._set_npn_protocols(protos)
def set_servername_callback(self, server_name_callback):
if server_name_callback is None:
self.sni_callback = None
else:
if not callable(server_name_callback):
raise TypeError("not a callable object")
def shim_cb(sslobj, servername, sslctx):
servername = self._encode_hostname(servername)
return server_name_callback(sslobj, servername, sslctx)
self.sni_callback = shim_cb
def set_alpn_protocols(self, alpn_protocols):
protos = bytearray()
for protocol in alpn_protocols:
b = bytes(protocol, 'ascii')
if len(b) == 0 or len(b) > 255:
raise SSLError('ALPN protocols must be 1 to 255 in length')
protos.append(len(b))
protos.extend(b)
self._set_alpn_protocols(protos)
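    # Wire-format note (illustrative): NPN and ALPN protocol lists are packed
    # as length-prefixed byte strings, e.g. ['h2', 'http/1.1'] becomes
    # b'\x02h2\x08http/1.1'.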
def _load_windows_store_certs(self, storename, purpose):
certs = bytearray()
try:
for cert, encoding, trust in enum_certificates(storename):
# CA certs are never PKCS#7 encoded
if encoding == "x509_asn":
if trust is True or purpose.oid in trust:
certs.extend(cert)
except PermissionError:
warnings.warn("unable to enumerate Windows certificate store")
if certs:
self.load_verify_locations(cadata=certs)
return certs
def load_default_certs(self, purpose=Purpose.SERVER_AUTH):
if not isinstance(purpose, _ASN1Object):
raise TypeError(purpose)
if sys.platform == "win32":
for storename in self._windows_cert_stores:
self._load_windows_store_certs(storename, purpose)
self.set_default_verify_paths()
if hasattr(_SSLContext, 'minimum_version'):
@property
def minimum_version(self):
return TLSVersion(super().minimum_version)
@minimum_version.setter
def minimum_version(self, value):
if value == TLSVersion.SSLv3:
self.options &= ~Options.OP_NO_SSLv3
super(SSLContext, SSLContext).minimum_version.__set__(self, value)
@property
def maximum_version(self):
return TLSVersion(super().maximum_version)
@maximum_version.setter
def maximum_version(self, value):
super(SSLContext, SSLContext).maximum_version.__set__(self, value)
@property
def options(self):
return Options(super().options)
@options.setter
def options(self, value):
super(SSLContext, SSLContext).options.__set__(self, value)
if hasattr(_ssl, 'HOSTFLAG_NEVER_CHECK_SUBJECT'):
@property
def hostname_checks_common_name(self):
ncs = self._host_flags & _ssl.HOSTFLAG_NEVER_CHECK_SUBJECT
return ncs != _ssl.HOSTFLAG_NEVER_CHECK_SUBJECT
@hostname_checks_common_name.setter
def hostname_checks_common_name(self, value):
if value:
self._host_flags &= ~_ssl.HOSTFLAG_NEVER_CHECK_SUBJECT
else:
self._host_flags |= _ssl.HOSTFLAG_NEVER_CHECK_SUBJECT
else:
@property
def hostname_checks_common_name(self):
return True
@property
def protocol(self):
return _SSLMethod(super().protocol)
@property
def verify_flags(self):
return VerifyFlags(super().verify_flags)
@verify_flags.setter
def verify_flags(self, value):
super(SSLContext, SSLContext).verify_flags.__set__(self, value)
@property
def verify_mode(self):
value = super().verify_mode
try:
return VerifyMode(value)
except ValueError:
return value
@verify_mode.setter
def verify_mode(self, value):
super(SSLContext, SSLContext).verify_mode.__set__(self, value)
def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None,
capath=None, cadata=None):
"""Create a SSLContext object with default settings.
NOTE: The protocol and settings may change anytime without prior
deprecation. The values represent a fair balance between maximum
compatibility and security.
"""
if not isinstance(purpose, _ASN1Object):
raise TypeError(purpose)
# SSLContext sets OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION,
# OP_CIPHER_SERVER_PREFERENCE, OP_SINGLE_DH_USE and OP_SINGLE_ECDH_USE
# by default.
context = SSLContext(PROTOCOL_TLS)
if purpose == Purpose.SERVER_AUTH:
# verify certs and host name in client mode
context.verify_mode = CERT_REQUIRED
context.check_hostname = True
if cafile or capath or cadata:
context.load_verify_locations(cafile, capath, cadata)
elif context.verify_mode != CERT_NONE:
# no explicit cafile, capath or cadata but the verify mode is
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
return context
def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=CERT_NONE,
check_hostname=False, purpose=Purpose.SERVER_AUTH,
certfile=None, keyfile=None,
cafile=None, capath=None, cadata=None):
"""Create a SSLContext object for Python stdlib modules
All Python stdlib modules shall use this function to create SSLContext
objects in order to keep common settings in one place. The configuration
    is less strict than create_default_context()'s to increase backward
compatibility.
"""
if not isinstance(purpose, _ASN1Object):
raise TypeError(purpose)
# SSLContext sets OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION,
# OP_CIPHER_SERVER_PREFERENCE, OP_SINGLE_DH_USE and OP_SINGLE_ECDH_USE
# by default.
context = SSLContext(protocol)
if not check_hostname:
context.check_hostname = False
if cert_reqs is not None:
context.verify_mode = cert_reqs
if check_hostname:
context.check_hostname = True
if keyfile and not certfile:
raise ValueError("certfile must be specified")
if certfile or keyfile:
context.load_cert_chain(certfile, keyfile)
# load CA root certs
if cafile or capath or cadata:
context.load_verify_locations(cafile, capath, cadata)
elif context.verify_mode != CERT_NONE:
# no explicit cafile, capath or cadata but the verify mode is
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
return context
# Used by http.client if no context is explicitly passed.
_create_default_https_context = create_default_context
# Backwards compatibility alias, even though it's not a public name.
_create_stdlib_context = _create_unverified_context
class SSLObject:
"""This class implements an interface on top of a low-level SSL object as
implemented by OpenSSL. This object captures the state of an SSL connection
but does not provide any network IO itself. IO needs to be performed
through separate "BIO" objects which are OpenSSL's IO abstraction layer.
This class does not have a public constructor. Instances are returned by
``SSLContext.wrap_bio``. This class is typically used by framework authors
that want to implement asynchronous IO for SSL through memory buffers.
When compared to ``SSLSocket``, this object lacks the following features:
* Any form of network IO, including methods such as ``recv`` and ``send``.
* The ``do_handshake_on_connect`` and ``suppress_ragged_eofs`` machinery.
"""
def __init__(self, *args, **kwargs):
raise TypeError(
f"{self.__class__.__name__} does not have a public "
f"constructor. Instances are returned by SSLContext.wrap_bio()."
)
@classmethod
def _create(cls, incoming, outgoing, server_side=False,
server_hostname=None, session=None, context=None):
self = cls.__new__(cls)
sslobj = context._wrap_bio(
incoming, outgoing, server_side=server_side,
server_hostname=server_hostname,
owner=self, session=session
)
self._sslobj = sslobj
return self
@property
def context(self):
"""The SSLContext that is currently in use."""
return self._sslobj.context
@context.setter
def context(self, ctx):
self._sslobj.context = ctx
@property
def session(self):
"""The SSLSession for client socket."""
return self._sslobj.session
@session.setter
def session(self, session):
self._sslobj.session = session
@property
def session_reused(self):
"""Was the client session reused during handshake"""
return self._sslobj.session_reused
@property
def server_side(self):
"""Whether this is a server-side socket."""
return self._sslobj.server_side
@property
def server_hostname(self):
"""The currently set server hostname (for SNI), or ``None`` if no
        server hostname is set."""
return self._sslobj.server_hostname
def read(self, len=1024, buffer=None):
"""Read up to 'len' bytes from the SSL object and return them.
If 'buffer' is provided, read into this buffer and return the number of
bytes read.
"""
if buffer is not None:
v = self._sslobj.read(len, buffer)
else:
v = self._sslobj.read(len)
return v
def write(self, data):
"""Write 'data' to the SSL object and return the number of bytes
written.
The 'data' argument must support the buffer interface.
"""
return self._sslobj.write(data)
def getpeercert(self, binary_form=False):
"""Returns a formatted version of the data in the certificate provided
by the other end of the SSL channel.
Return None if no certificate was provided, {} if a certificate was
provided, but not validated.
"""
return self._sslobj.getpeercert(binary_form)
def selected_npn_protocol(self):
"""Return the currently selected NPN protocol as a string, or ``None``
if a next protocol was not negotiated or if NPN is not supported by one
of the peers."""
if _ssl.HAS_NPN:
return self._sslobj.selected_npn_protocol()
def selected_alpn_protocol(self):
"""Return the currently selected ALPN protocol as a string, or ``None``
if a next protocol was not negotiated or if ALPN is not supported by one
of the peers."""
if _ssl.HAS_ALPN:
return self._sslobj.selected_alpn_protocol()
def cipher(self):
"""Return the currently selected cipher as a 3-tuple ``(name,
ssl_version, secret_bits)``."""
return self._sslobj.cipher()
def shared_ciphers(self):
"""Return a list of ciphers shared by the client during the handshake or
None if this is not a valid server connection.
"""
return self._sslobj.shared_ciphers()
def compression(self):
"""Return the current compression algorithm in use, or ``None`` if
compression was not negotiated or not supported by one of the peers."""
return self._sslobj.compression()
def pending(self):
"""Return the number of bytes that can be read immediately."""
return self._sslobj.pending()
def do_handshake(self):
"""Start the SSL/TLS handshake."""
self._sslobj.do_handshake()
def unwrap(self):
"""Start the SSL shutdown handshake."""
return self._sslobj.shutdown()
def get_channel_binding(self, cb_type="tls-unique"):
"""Get channel binding data for current connection. Raise ValueError
if the requested `cb_type` is not supported. Return bytes of the data
or None if the data is not available (e.g. before the handshake)."""
return self._sslobj.get_channel_binding(cb_type)
def version(self):
"""Return a string identifying the protocol version used by the
current SSL channel. """
return self._sslobj.version()
def verify_client_post_handshake(self):
return self._sslobj.verify_client_post_handshake()
def _sslcopydoc(func):
"""Copy docstring from SSLObject to SSLSocket"""
func.__doc__ = getattr(SSLObject, func.__name__).__doc__
return func
class SSLSocket(socket):
"""This class implements a subtype of socket.socket that wraps
the underlying OS socket in an SSL context when necessary, and
provides read and write methods over that channel. """
def __init__(self, *args, **kwargs):
raise TypeError(
f"{self.__class__.__name__} does not have a public "
f"constructor. Instances are returned by "
f"SSLContext.wrap_socket()."
)
@classmethod
def _create(cls, sock, server_side=False, do_handshake_on_connect=True,
suppress_ragged_eofs=True, server_hostname=None,
context=None, session=None):
if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
raise NotImplementedError("only stream sockets are supported")
if server_side:
if server_hostname:
raise ValueError("server_hostname can only be specified "
"in client mode")
if session is not None:
raise ValueError("session can only be specified in "
"client mode")
if context.check_hostname and not server_hostname:
raise ValueError("check_hostname requires server_hostname")
kwargs = dict(
family=sock.family, type=sock.type, proto=sock.proto,
fileno=sock.fileno()
)
self = cls.__new__(cls, **kwargs)
super(SSLSocket, self).__init__(**kwargs)
self.settimeout(sock.gettimeout())
sock.detach()
self._context = context
self._session = session
self._closed = False
self._sslobj = None
self.server_side = server_side
self.server_hostname = context._encode_hostname(server_hostname)
self.do_handshake_on_connect = do_handshake_on_connect
self.suppress_ragged_eofs = suppress_ragged_eofs
# See if we are connected
try:
self.getpeername()
except OSError as e:
if e.errno != errno.ENOTCONN:
raise
connected = False
else:
connected = True
self._connected = connected
if connected:
# create the SSL object
try:
self._sslobj = self._context._wrap_socket(
self, server_side, self.server_hostname,
owner=self, session=self._session,
)
if do_handshake_on_connect:
timeout = self.gettimeout()
if timeout == 0.0:
# non-blocking
raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
self.do_handshake()
except (OSError, ValueError):
self.close()
raise
return self
@property
@_sslcopydoc
def context(self):
return self._context
@context.setter
def context(self, ctx):
self._context = ctx
self._sslobj.context = ctx
@property
@_sslcopydoc
def session(self):
if self._sslobj is not None:
return self._sslobj.session
@session.setter
def session(self, session):
self._session = session
if self._sslobj is not None:
self._sslobj.session = session
@property
@_sslcopydoc
def session_reused(self):
if self._sslobj is not None:
return self._sslobj.session_reused
def dup(self):
raise NotImplementedError("Can't dup() %s instances" %
self.__class__.__name__)
def _checkClosed(self, msg=None):
# raise an exception here if you wish to check for spurious closes
pass
def _check_connected(self):
if not self._connected:
# getpeername() will raise ENOTCONN if the socket is really
# not connected; note that we can be connected even without
# _connected being set, e.g. if connect() first returned
# EAGAIN.
self.getpeername()
def read(self, len=1024, buffer=None):
"""Read up to LEN bytes and return them.
        Return a zero-length bytes object on EOF."""
self._checkClosed()
if self._sslobj is None:
raise ValueError("Read on closed or unwrapped SSL socket.")
try:
if buffer is not None:
return self._sslobj.read(len, buffer)
else:
return self._sslobj.read(len)
except SSLError as x:
if x.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:
if buffer is not None:
return 0
else:
return b''
else:
raise
def write(self, data):
"""Write DATA to the underlying SSL channel. Returns
number of bytes of DATA actually transmitted."""
self._checkClosed()
if self._sslobj is None:
raise ValueError("Write on closed or unwrapped SSL socket.")
return self._sslobj.write(data)
@_sslcopydoc
def getpeercert(self, binary_form=False):
self._checkClosed()
self._check_connected()
return self._sslobj.getpeercert(binary_form)
@_sslcopydoc
def selected_npn_protocol(self):
self._checkClosed()
if self._sslobj is None or not _ssl.HAS_NPN:
return None
else:
return self._sslobj.selected_npn_protocol()
@_sslcopydoc
def selected_alpn_protocol(self):
self._checkClosed()
if self._sslobj is None or not _ssl.HAS_ALPN:
return None
else:
return self._sslobj.selected_alpn_protocol()
@_sslcopydoc
def cipher(self):
self._checkClosed()
if self._sslobj is None:
return None
else:
return self._sslobj.cipher()
@_sslcopydoc
def shared_ciphers(self):
self._checkClosed()
if self._sslobj is None:
return None
else:
return self._sslobj.shared_ciphers()
@_sslcopydoc
def compression(self):
self._checkClosed()
if self._sslobj is None:
return None
else:
return self._sslobj.compression()
def send(self, data, flags=0):
self._checkClosed()
if self._sslobj is not None:
if flags != 0:
raise ValueError(
"non-zero flags not allowed in calls to send() on %s" %
self.__class__)
return self._sslobj.write(data)
else:
return super().send(data, flags)
def sendto(self, data, flags_or_addr, addr=None):
self._checkClosed()
if self._sslobj is not None:
raise ValueError("sendto not allowed on instances of %s" %
self.__class__)
elif addr is None:
return super().sendto(data, flags_or_addr)
else:
return super().sendto(data, flags_or_addr, addr)
def sendmsg(self, *args, **kwargs):
# Ensure programs don't send data unencrypted if they try to
# use this method.
raise NotImplementedError("sendmsg not allowed on instances of %s" %
self.__class__)
def sendall(self, data, flags=0):
self._checkClosed()
if self._sslobj is not None:
if flags != 0:
raise ValueError(
"non-zero flags not allowed in calls to sendall() on %s" %
self.__class__)
count = 0
with memoryview(data) as view, view.cast("B") as byte_view:
amount = len(byte_view)
while count < amount:
v = self.send(byte_view[count:])
count += v
else:
return super().sendall(data, flags)
def sendfile(self, file, offset=0, count=None):
"""Send a file, possibly by using os.sendfile() if this is a
clear-text socket. Return the total number of bytes sent.
"""
if self._sslobj is not None:
return self._sendfile_use_send(file, offset, count)
else:
# os.sendfile() works with plain sockets only
return super().sendfile(file, offset, count)
def recv(self, buflen=1024, flags=0):
self._checkClosed()
if self._sslobj is not None:
if flags != 0:
raise ValueError(
"non-zero flags not allowed in calls to recv() on %s" %
self.__class__)
return self.read(buflen)
else:
return super().recv(buflen, flags)
def recv_into(self, buffer, nbytes=None, flags=0):
self._checkClosed()
if buffer and (nbytes is None):
nbytes = len(buffer)
elif nbytes is None:
nbytes = 1024
if self._sslobj is not None:
if flags != 0:
raise ValueError(
"non-zero flags not allowed in calls to recv_into() on %s" %
self.__class__)
return self.read(nbytes, buffer)
else:
return super().recv_into(buffer, nbytes, flags)
def recvfrom(self, buflen=1024, flags=0):
self._checkClosed()
if self._sslobj is not None:
raise ValueError("recvfrom not allowed on instances of %s" %
self.__class__)
else:
return super().recvfrom(buflen, flags)
def recvfrom_into(self, buffer, nbytes=None, flags=0):
self._checkClosed()
if self._sslobj is not None:
raise ValueError("recvfrom_into not allowed on instances of %s" %
self.__class__)
else:
return super().recvfrom_into(buffer, nbytes, flags)
def recvmsg(self, *args, **kwargs):
raise NotImplementedError("recvmsg not allowed on instances of %s" %
self.__class__)
def recvmsg_into(self, *args, **kwargs):
raise NotImplementedError("recvmsg_into not allowed on instances of "
"%s" % self.__class__)
@_sslcopydoc
def pending(self):
self._checkClosed()
if self._sslobj is not None:
return self._sslobj.pending()
else:
return 0
def shutdown(self, how):
self._checkClosed()
self._sslobj = None
super().shutdown(how)
@_sslcopydoc
def unwrap(self):
if self._sslobj:
s = self._sslobj.shutdown()
self._sslobj = None
return s
else:
raise ValueError("No SSL wrapper around " + str(self))
@_sslcopydoc
def verify_client_post_handshake(self):
if self._sslobj:
return self._sslobj.verify_client_post_handshake()
else:
raise ValueError("No SSL wrapper around " + str(self))
def _real_close(self):
self._sslobj = None
super()._real_close()
@_sslcopydoc
def do_handshake(self, block=False):
self._check_connected()
timeout = self.gettimeout()
try:
if timeout == 0.0 and block:
self.settimeout(None)
self._sslobj.do_handshake()
finally:
self.settimeout(timeout)
def _real_connect(self, addr, connect_ex):
if self.server_side:
raise ValueError("can't connect in server-side mode")
# Here we assume that the socket is client-side, and not
# connected at the time of the call. We connect it, then wrap it.
if self._connected or self._sslobj is not None:
raise ValueError("attempt to connect already-connected SSLSocket!")
self._sslobj = self.context._wrap_socket(
self, False, self.server_hostname,
owner=self, session=self._session
)
try:
if connect_ex:
rc = super().connect_ex(addr)
else:
rc = None
super().connect(addr)
if not rc:
self._connected = True
if self.do_handshake_on_connect:
self.do_handshake()
return rc
except (OSError, ValueError):
self._sslobj = None
raise
def connect(self, addr):
"""Connects to remote ADDR, and then wraps the connection in
an SSL channel."""
self._real_connect(addr, False)
def connect_ex(self, addr):
"""Connects to remote ADDR, and then wraps the connection in
an SSL channel."""
return self._real_connect(addr, True)
def accept(self):
"""Accepts a new connection from a remote client, and returns
a tuple containing that new connection wrapped with a server-side
SSL channel, and the address of the remote client."""
newsock, addr = super().accept()
newsock = self.context.wrap_socket(newsock,
do_handshake_on_connect=self.do_handshake_on_connect,
suppress_ragged_eofs=self.suppress_ragged_eofs,
server_side=True)
return newsock, addr
@_sslcopydoc
def get_channel_binding(self, cb_type="tls-unique"):
if self._sslobj is not None:
return self._sslobj.get_channel_binding(cb_type)
else:
if cb_type not in CHANNEL_BINDING_TYPES:
raise ValueError(
"{0} channel binding type not implemented".format(cb_type)
)
return None
@_sslcopydoc
def version(self):
if self._sslobj is not None:
return self._sslobj.version()
else:
return None
# Python does not support forward declaration of types.
SSLContext.sslsocket_class = SSLSocket
SSLContext.sslobject_class = SSLObject
def wrap_socket(sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_TLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
ciphers=None):
if server_side and not certfile:
raise ValueError("certfile must be specified for server-side "
"operations")
if keyfile and not certfile:
raise ValueError("certfile must be specified")
context = SSLContext(ssl_version)
context.verify_mode = cert_reqs
if ca_certs:
context.load_verify_locations(ca_certs)
if certfile:
context.load_cert_chain(certfile, keyfile)
if ciphers:
context.set_ciphers(ciphers)
return context.wrap_socket(
sock=sock, server_side=server_side,
do_handshake_on_connect=do_handshake_on_connect,
suppress_ragged_eofs=suppress_ragged_eofs
)
# some utility functions
def cert_time_to_seconds(cert_time):
"""Return the time in seconds since the Epoch, given the timestring
representing the "notBefore" or "notAfter" date from a certificate
in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C locale).
"notBefore" or "notAfter" dates must use UTC (RFC 5280).
Month is one of: Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
UTC should be specified as GMT (see ASN1_TIME_print())
"""
from time import strptime
from calendar import timegm
months = (
"Jan","Feb","Mar","Apr","May","Jun",
"Jul","Aug","Sep","Oct","Nov","Dec"
)
time_format = ' %d %H:%M:%S %Y GMT' # NOTE: no month, fixed GMT
try:
month_number = months.index(cert_time[:3].title()) + 1
except ValueError:
raise ValueError('time data %r does not match '
'format "%%b%s"' % (cert_time, time_format))
else:
# found valid month
tt = strptime(cert_time[3:], time_format)
# return an integer, the previous mktime()-based implementation
# returned a float (fractional seconds are always zero here).
return timegm((tt[0], month_number) + tt[2:6])
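# Example (illustrative): cert_time_to_seconds("Jan  1 00:00:00 1970 GMT")
# returns 0, i.e. the Epoch itself.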
PEM_HEADER = "-----BEGIN CERTIFICATE-----"
PEM_FOOTER = "-----END CERTIFICATE-----"
def DER_cert_to_PEM_cert(der_cert_bytes):
"""Takes a certificate in binary DER format and returns the
PEM version of it as a string."""
f = str(base64.standard_b64encode(der_cert_bytes), 'ASCII', 'strict')
ss = [PEM_HEADER]
ss += [f[i:i+64] for i in range(0, len(f), 64)]
ss.append(PEM_FOOTER + '\n')
return '\n'.join(ss)
def PEM_cert_to_DER_cert(pem_cert_string):
"""Takes a certificate in ASCII PEM format and returns the
DER-encoded version of it as a byte sequence"""
if not pem_cert_string.startswith(PEM_HEADER):
raise ValueError("Invalid PEM encoding; must start with %s"
% PEM_HEADER)
if not pem_cert_string.strip().endswith(PEM_FOOTER):
raise ValueError("Invalid PEM encoding; must end with %s"
% PEM_FOOTER)
d = pem_cert_string.strip()[len(PEM_HEADER):-len(PEM_FOOTER)]
return base64.decodebytes(d.encode('ASCII', 'strict'))
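# Round-trip property (illustrative): for any DER-encoded certificate `der`,
# PEM_cert_to_DER_cert(DER_cert_to_PEM_cert(der)) == der.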
def get_server_certificate(addr, ssl_version=PROTOCOL_TLS, ca_certs=None):
"""Retrieve the certificate from the server at the specified address,
and return it as a PEM-encoded string.
If 'ca_certs' is specified, validate the server cert against it.
If 'ssl_version' is specified, use it in the connection attempt."""
host, port = addr
if ca_certs is not None:
cert_reqs = CERT_REQUIRED
else:
cert_reqs = CERT_NONE
context = _create_stdlib_context(ssl_version,
cert_reqs=cert_reqs,
cafile=ca_certs)
with create_connection(addr) as sock:
with context.wrap_socket(sock) as sslsock:
dercert = sslsock.getpeercert(True)
return DER_cert_to_PEM_cert(dercert)
def get_protocol_name(protocol_code):
return _PROTOCOL_NAMES.get(protocol_code, '<unknown>')
| 34.412529 | 116 | 0.640012 |
793ed60d7c6ec507b6101bf3808b446956dc7102 | 440 | py | Python | PIL_templater.py | BitokuOokami/CardMachine | d927b062ca96af1313ec94de70053a1e069c20d0 | [
"MIT"
] | 1 | 2016-12-15T23:46:52.000Z | 2016-12-15T23:46:52.000Z | PIL_templater.py | BitokuOokami/CardMachine | d927b062ca96af1313ec94de70053a1e069c20d0 | [
"MIT"
] | 3 | 2016-01-12T01:42:31.000Z | 2016-03-28T23:31:53.000Z | PIL_templater.py | BitokuOokami/CardMachine | d927b062ca96af1313ec94de70053a1e069c20d0 | [
"MIT"
] | 4 | 2015-12-27T01:48:34.000Z | 2019-10-19T03:42:51.000Z | from PIL import Image, ImageFont, ImageDraw
filename = "Template.png"
bleed_w = 850
bleed_h = 1161
w = 788
h = 1088
w_marg = (bleed_w - w) // 2  # integer pixel margins; any odd pixel of bleed goes to one side
h_marg = (bleed_h - h) // 2
image = Image.new("RGBA", (bleed_w, bleed_h))
draw = ImageDraw.Draw(image)
points = ((w_marg,h_marg),
(w_marg+w,h_marg),
(w_marg+w,h_marg+h),
(w_marg,h_marg+h),
(w_marg,h_marg))
draw.line(points,fill=(0,0,0,255), width=2)
image.save(filename, "PNG")
| 16.923077 | 45 | 0.654545 |
793ed639a38455174483f18d7cfea8ff4f599db1 | 600 | py | Python | mdls/plugins/references.py | ompugao/mdls | 7ba9fbf75fd13c6e3f85fc248cb32e966dea2399 | [
"MIT"
] | null | null | null | mdls/plugins/references.py | ompugao/mdls | 7ba9fbf75fd13c6e3f85fc248cb32e966dea2399 | [
"MIT"
] | null | null | null | mdls/plugins/references.py | ompugao/mdls | 7ba9fbf75fd13c6e3f85fc248cb32e966dea2399 | [
"MIT"
] | null | null | null | # Copyright 2017 Palantir Technologies, Inc.
import logging
from mdls import hookimpl, uris, _utils
log = logging.getLogger(__name__)
@hookimpl
def mdls_references(document, position, exclude_declaration=False):
    # Reference lookup is effectively stubbed out here: `usages` stays empty,
    # so the hook reports no references. It stands in for the definition
    # objects a resolver (jedi-style, with module_path/line/column/name
    # attributes) would normally supply.
    usages = []
# Filter out builtin modules
return [{
'uri': uris.uri_with(document.uri, path=d.module_path) if d.module_path else document.uri,
'range': {
'start': {'line': d.line - 1, 'character': d.column},
'end': {'line': d.line - 1, 'character': d.column + len(d.name)}
}
} for d in usages if not d.in_builtin_module()]
| 30 | 98 | 0.646667 |
793ed694428f146cedfadcbb6ebf23d0667e0818 | 27,206 | py | Python | external/slim/nets/resnet_v1_test.py | thodan/epos | d67657bbb06da5a6adb8a035a2f58fc305e396f7 | [
"MIT"
] | 52 | 2020-10-02T13:16:52.000Z | 2022-03-23T06:05:12.000Z | external/slim/nets/resnet_v1_test.py | thodan/epos | d67657bbb06da5a6adb8a035a2f58fc305e396f7 | [
"MIT"
] | 7 | 2021-04-16T05:49:30.000Z | 2022-02-26T17:30:41.000Z | external/slim/nets/resnet_v1_test.py | thodan/epos | d67657bbb06da5a6adb8a035a2f58fc305e396f7 | [
"MIT"
] | 7 | 2020-10-03T01:45:47.000Z | 2022-03-06T16:41:00.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.nets.resnet_v1."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from nets import resnet_utils
from nets import resnet_v1
slim = tf.contrib.slim
def create_test_input(batch_size, height, width, channels):
"""Create test input tensor.
Args:
batch_size: The number of images per batch or `None` if unknown.
height: The height of each image or `None` if unknown.
width: The width of each image or `None` if unknown.
channels: The number of channels per image or `None` if unknown.
Returns:
Either a placeholder `Tensor` of dimension
[batch_size, height, width, channels] if any of the inputs are `None` or a
constant `Tensor` with the mesh grid values along the spatial dimensions.
"""
if None in [batch_size, height, width, channels]:
return tf.placeholder(tf.float32, (batch_size, height, width, channels))
else:
return tf.cast(
np.tile(
np.reshape(
np.reshape(np.arange(height), [height, 1]) +
np.reshape(np.arange(width), [1, width]),
[1, height, width, 1]),
[batch_size, 1, 1, channels]), tf.float32)
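# Example (illustrative): create_test_input(1, 2, 2, 1) returns the mesh grid
# [[0, 1], [1, 2]] (row index + column index) as a [1, 2, 2, 1] tensor.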
class ResnetUtilsTest(tf.test.TestCase):
def testSubsampleThreeByThree(self):
x = tf.reshape(tf.cast(tf.range(9), tf.float32), [1, 3, 3, 1])
x = resnet_utils.subsample(x, 2)
expected = tf.reshape(tf.constant([0, 2, 6, 8]), [1, 2, 2, 1])
with self.test_session():
self.assertAllClose(x.eval(), expected.eval())
def testSubsampleFourByFour(self):
x = tf.reshape(tf.cast(tf.range(16), tf.float32), [1, 4, 4, 1])
x = resnet_utils.subsample(x, 2)
expected = tf.reshape(tf.constant([0, 2, 8, 10]), [1, 2, 2, 1])
with self.test_session():
self.assertAllClose(x.eval(), expected.eval())
def testConv2DSameEven(self):
n, n2 = 4, 2
# Input image.
x = create_test_input(1, n, n, 1)
# Convolution kernel.
w = create_test_input(1, 3, 3, 1)
w = tf.reshape(w, [3, 3, 1, 1])
tf.get_variable('Conv/weights', initializer=w)
tf.get_variable('Conv/biases', initializer=tf.zeros([1]))
tf.get_variable_scope().reuse_variables()
y1 = slim.conv2d(x, 1, [3, 3], stride=1, scope='Conv')
y1_expected = tf.cast([[14, 28, 43, 26],
[28, 48, 66, 37],
[43, 66, 84, 46],
[26, 37, 46, 22]], tf.float32)
y1_expected = tf.reshape(y1_expected, [1, n, n, 1])
y2 = resnet_utils.subsample(y1, 2)
y2_expected = tf.cast([[14, 43], [43, 84]], tf.float32)
y2_expected = tf.reshape(y2_expected, [1, n2, n2, 1])
y3 = resnet_utils.conv2d_same(x, 1, 3, stride=2, scope='Conv')
y3_expected = y2_expected
y4 = slim.conv2d(x, 1, [3, 3], stride=2, scope='Conv')
y4_expected = tf.cast([[48, 37], [37, 22]], tf.float32)
y4_expected = tf.reshape(y4_expected, [1, n2, n2, 1])
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
self.assertAllClose(y1.eval(), y1_expected.eval())
self.assertAllClose(y2.eval(), y2_expected.eval())
self.assertAllClose(y3.eval(), y3_expected.eval())
self.assertAllClose(y4.eval(), y4_expected.eval())
def testConv2DSameOdd(self):
n, n2 = 5, 3
# Input image.
x = create_test_input(1, n, n, 1)
# Convolution kernel.
w = create_test_input(1, 3, 3, 1)
w = tf.reshape(w, [3, 3, 1, 1])
tf.get_variable('Conv/weights', initializer=w)
tf.get_variable('Conv/biases', initializer=tf.zeros([1]))
tf.get_variable_scope().reuse_variables()
y1 = slim.conv2d(x, 1, [3, 3], stride=1, scope='Conv')
y1_expected = tf.cast([[14, 28, 43, 58, 34],
[28, 48, 66, 84, 46],
[43, 66, 84, 102, 55],
[58, 84, 102, 120, 64],
[34, 46, 55, 64, 30]], tf.float32)
y1_expected = tf.reshape(y1_expected, [1, n, n, 1])
y2 = resnet_utils.subsample(y1, 2)
y2_expected = tf.cast([[14, 43, 34],
[43, 84, 55],
[34, 55, 30]], tf.float32)
y2_expected = tf.reshape(y2_expected, [1, n2, n2, 1])
y3 = resnet_utils.conv2d_same(x, 1, 3, stride=2, scope='Conv')
y3_expected = y2_expected
y4 = slim.conv2d(x, 1, [3, 3], stride=2, scope='Conv')
y4_expected = y2_expected
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
self.assertAllClose(y1.eval(), y1_expected.eval())
self.assertAllClose(y2.eval(), y2_expected.eval())
self.assertAllClose(y3.eval(), y3_expected.eval())
self.assertAllClose(y4.eval(), y4_expected.eval())
def _resnet_plain(self, inputs, blocks, output_stride=None, scope=None):
"""A plain ResNet without extra layers before or after the ResNet blocks."""
with tf.variable_scope(scope, values=[inputs]):
with slim.arg_scope([slim.conv2d], outputs_collections='end_points'):
net = resnet_utils.stack_blocks_dense(inputs, blocks, output_stride)
end_points = slim.utils.convert_collection_to_dict('end_points')
return net, end_points
def testEndPointsV1(self):
"""Test the end points of a tiny v1 bottleneck network."""
blocks = [
resnet_v1.resnet_v1_block(
'block1', base_depth=1, num_units=2, stride=2),
resnet_v1.resnet_v1_block(
'block2', base_depth=2, num_units=2, stride=1),
]
inputs = create_test_input(2, 32, 16, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
expected = [
'tiny/block1/unit_1/bottleneck_v1/shortcut',
'tiny/block1/unit_1/bottleneck_v1/conv1',
'tiny/block1/unit_1/bottleneck_v1/conv2',
'tiny/block1/unit_1/bottleneck_v1/conv3',
'tiny/block1/unit_2/bottleneck_v1/conv1',
'tiny/block1/unit_2/bottleneck_v1/conv2',
'tiny/block1/unit_2/bottleneck_v1/conv3',
'tiny/block2/unit_1/bottleneck_v1/shortcut',
'tiny/block2/unit_1/bottleneck_v1/conv1',
'tiny/block2/unit_1/bottleneck_v1/conv2',
'tiny/block2/unit_1/bottleneck_v1/conv3',
'tiny/block2/unit_2/bottleneck_v1/conv1',
'tiny/block2/unit_2/bottleneck_v1/conv2',
'tiny/block2/unit_2/bottleneck_v1/conv3']
self.assertItemsEqual(expected, end_points.keys())
def _stack_blocks_nondense(self, net, blocks):
"""A simplified ResNet Block stacker without output stride control."""
for block in blocks:
with tf.variable_scope(block.scope, 'block', [net]):
for i, unit in enumerate(block.args):
with tf.variable_scope('unit_%d' % (i + 1), values=[net]):
net = block.unit_fn(net, rate=1, **unit)
return net
def testAtrousValuesBottleneck(self):
"""Verify the values of dense feature extraction by atrous convolution.
Make sure that dense feature extraction by stack_blocks_dense() followed by
subsampling gives identical results to feature extraction at the nominal
model output stride using the simple self._stack_blocks_nondense() above.
"""
block = resnet_v1.resnet_v1_block
blocks = [
block('block1', base_depth=1, num_units=2, stride=2),
block('block2', base_depth=2, num_units=2, stride=2),
block('block3', base_depth=4, num_units=2, stride=2),
block('block4', base_depth=8, num_units=2, stride=1),
]
nominal_stride = 8
# Test both odd and even input dimensions.
height = 30
width = 31
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
with slim.arg_scope([slim.batch_norm], is_training=False):
for output_stride in [1, 2, 4, 8, None]:
with tf.Graph().as_default():
with self.test_session() as sess:
tf.set_random_seed(0)
inputs = create_test_input(1, height, width, 3)
# Dense feature extraction followed by subsampling.
output = resnet_utils.stack_blocks_dense(inputs,
blocks,
output_stride)
if output_stride is None:
factor = 1
else:
factor = nominal_stride // output_stride
output = resnet_utils.subsample(output, factor)
# Make the two networks use the same weights.
tf.get_variable_scope().reuse_variables()
# Feature extraction at the nominal network rate.
expected = self._stack_blocks_nondense(inputs, blocks)
sess.run(tf.global_variables_initializer())
output, expected = sess.run([output, expected])
self.assertAllClose(output, expected, atol=1e-4, rtol=1e-4)
def testStridingLastUnitVsSubsampleBlockEnd(self):
"""Compares subsampling at the block's last unit or block's end.
Makes sure that the final output is the same when we use a stride at the
last unit of a block vs. we subsample activations at the end of a block.
"""
block = resnet_v1.resnet_v1_block
blocks = [
block('block1', base_depth=1, num_units=2, stride=2),
block('block2', base_depth=2, num_units=2, stride=2),
block('block3', base_depth=4, num_units=2, stride=2),
block('block4', base_depth=8, num_units=2, stride=1),
]
# Test both odd and even input dimensions.
height = 30
width = 31
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
with slim.arg_scope([slim.batch_norm], is_training=False):
for output_stride in [1, 2, 4, 8, None]:
with tf.Graph().as_default():
with self.test_session() as sess:
tf.set_random_seed(0)
inputs = create_test_input(1, height, width, 3)
# Subsampling at the last unit of the block.
output = resnet_utils.stack_blocks_dense(
inputs, blocks, output_stride,
store_non_strided_activations=False,
outputs_collections='output')
output_end_points = slim.utils.convert_collection_to_dict(
'output')
# Make the two networks use the same weights.
tf.get_variable_scope().reuse_variables()
# Subsample activations at the end of the blocks.
expected = resnet_utils.stack_blocks_dense(
inputs, blocks, output_stride,
store_non_strided_activations=True,
outputs_collections='expected')
expected_end_points = slim.utils.convert_collection_to_dict(
'expected')
sess.run(tf.global_variables_initializer())
# Make sure that the final output is the same.
output, expected = sess.run([output, expected])
self.assertAllClose(output, expected, atol=1e-4, rtol=1e-4)
# Make sure that intermediate block activations in
# output_end_points are subsampled versions of the corresponding
# ones in expected_end_points.
for i, block in enumerate(blocks[:-1:]):
output = output_end_points[block.scope]
expected = expected_end_points[block.scope]
atrous_activated = (output_stride is not None and
2 ** i >= output_stride)
if not atrous_activated:
expected = resnet_utils.subsample(expected, 2)
output, expected = sess.run([output, expected])
self.assertAllClose(output, expected, atol=1e-4, rtol=1e-4)
class ResnetCompleteNetworkTest(tf.test.TestCase):
"""Tests with complete small ResNet v1 model."""
def _resnet_small(self,
inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
include_root_block=True,
spatial_squeeze=True,
reuse=None,
scope='resnet_v1_small'):
"""A shallow and thin ResNet v1 for faster tests."""
block = resnet_v1.resnet_v1_block
blocks = [
block('block1', base_depth=1, num_units=3, stride=2),
block('block2', base_depth=2, num_units=3, stride=2),
block('block3', base_depth=4, num_units=3, stride=2),
block('block4', base_depth=8, num_units=2, stride=1),
]
return resnet_v1.resnet_v1(inputs, blocks, num_classes,
is_training=is_training,
global_pool=global_pool,
output_stride=output_stride,
include_root_block=include_root_block,
spatial_squeeze=spatial_squeeze,
reuse=reuse,
scope=scope)
def testClassificationEndPoints(self):
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
logits, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
spatial_squeeze=False,
scope='resnet')
self.assertTrue(logits.op.name.startswith('resnet/logits'))
self.assertListEqual(logits.get_shape().as_list(), [2, 1, 1, num_classes])
self.assertTrue('predictions' in end_points)
self.assertListEqual(end_points['predictions'].get_shape().as_list(),
[2, 1, 1, num_classes])
self.assertTrue('global_pool' in end_points)
self.assertListEqual(end_points['global_pool'].get_shape().as_list(),
[2, 1, 1, 32])
def testClassificationEndPointsWithNoBatchNormArgscope(self):
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
logits, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
spatial_squeeze=False,
is_training=None,
scope='resnet')
self.assertTrue(logits.op.name.startswith('resnet/logits'))
self.assertListEqual(logits.get_shape().as_list(), [2, 1, 1, num_classes])
self.assertTrue('predictions' in end_points)
self.assertListEqual(end_points['predictions'].get_shape().as_list(),
[2, 1, 1, num_classes])
self.assertTrue('global_pool' in end_points)
self.assertListEqual(end_points['global_pool'].get_shape().as_list(),
[2, 1, 1, 32])
def testEndpointNames(self):
# Like ResnetUtilsTest.testEndPointsV1(), but for the public API.
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
expected = ['resnet/conv1']
for block in range(1, 5):
for unit in range(1, 4 if block < 4 else 3):
for conv in range(1, 4):
expected.append('resnet/block%d/unit_%d/bottleneck_v1/conv%d' %
(block, unit, conv))
expected.append('resnet/block%d/unit_%d/bottleneck_v1' % (block, unit))
expected.append('resnet/block%d/unit_1/bottleneck_v1/shortcut' % block)
expected.append('resnet/block%d' % block)
expected.extend(['global_pool', 'resnet/logits', 'resnet/spatial_squeeze',
'predictions'])
self.assertItemsEqual(end_points.keys(), expected)
def testClassificationShapes(self):
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 28, 28, 4],
'resnet/block2': [2, 14, 14, 8],
'resnet/block3': [2, 7, 7, 16],
'resnet/block4': [2, 7, 7, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
inputs = create_test_input(2, 321, 321, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
spatial_squeeze=False,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 41, 41, 4],
'resnet/block2': [2, 21, 21, 8],
'resnet/block3': [2, 11, 11, 16],
'resnet/block4': [2, 11, 11, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testRootlessFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
inputs = create_test_input(2, 128, 128, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
include_root_block=False,
spatial_squeeze=False,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 64, 64, 4],
'resnet/block2': [2, 32, 32, 8],
'resnet/block3': [2, 16, 16, 16],
'resnet/block4': [2, 16, 16, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testAtrousFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
output_stride = 8
inputs = create_test_input(2, 321, 321, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs,
num_classes,
global_pool=global_pool,
output_stride=output_stride,
spatial_squeeze=False,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 41, 41, 4],
'resnet/block2': [2, 41, 41, 8],
'resnet/block3': [2, 41, 41, 16],
'resnet/block4': [2, 41, 41, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testAtrousFullyConvolutionalValues(self):
"""Verify dense feature extraction with atrous convolution."""
nominal_stride = 32
for output_stride in [4, 8, 16, 32, None]:
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
with tf.Graph().as_default():
with self.test_session() as sess:
tf.set_random_seed(0)
inputs = create_test_input(2, 81, 81, 3)
# Dense feature extraction followed by subsampling.
output, _ = self._resnet_small(inputs, None, is_training=False,
global_pool=False,
output_stride=output_stride)
if output_stride is None:
factor = 1
else:
factor = nominal_stride // output_stride
output = resnet_utils.subsample(output, factor)
# Make the two networks use the same weights.
tf.get_variable_scope().reuse_variables()
# Feature extraction at the nominal network rate.
expected, _ = self._resnet_small(inputs, None, is_training=False,
global_pool=False)
sess.run(tf.global_variables_initializer())
self.assertAllClose(output.eval(), expected.eval(),
atol=1e-4, rtol=1e-4)
def testUnknownBatchSize(self):
batch = 2
height, width = 65, 65
global_pool = True
num_classes = 10
inputs = create_test_input(None, height, width, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
logits, _ = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
spatial_squeeze=False,
scope='resnet')
self.assertTrue(logits.op.name.startswith('resnet/logits'))
self.assertListEqual(logits.get_shape().as_list(),
[None, 1, 1, num_classes])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 1, 1, num_classes))
def testFullyConvolutionalUnknownHeightWidth(self):
batch = 2
height, width = 65, 65
global_pool = False
inputs = create_test_input(batch, None, None, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
output, _ = self._resnet_small(inputs, None, global_pool=global_pool)
self.assertListEqual(output.get_shape().as_list(),
[batch, None, None, 32])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(output, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 3, 3, 32))
def testAtrousFullyConvolutionalUnknownHeightWidth(self):
batch = 2
height, width = 65, 65
global_pool = False
output_stride = 8
inputs = create_test_input(batch, None, None, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
output, _ = self._resnet_small(inputs,
None,
global_pool=global_pool,
output_stride=output_stride)
self.assertListEqual(output.get_shape().as_list(),
[batch, None, None, 32])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(output, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 9, 9, 32))
def testDepthMultiplier(self):
resnets = [
resnet_v1.resnet_v1_50, resnet_v1.resnet_v1_101,
resnet_v1.resnet_v1_152, resnet_v1.resnet_v1_200
]
resnet_names = [
'resnet_v1_50', 'resnet_v1_101', 'resnet_v1_152', 'resnet_v1_200'
]
for resnet, resnet_name in zip(resnets, resnet_names):
depth_multiplier = 0.25
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
scope_base = resnet_name + '_base'
_, end_points_base = resnet(
inputs,
num_classes,
global_pool=global_pool,
min_base_depth=1,
scope=scope_base)
scope_test = resnet_name + '_test'
_, end_points_test = resnet(
inputs,
num_classes,
global_pool=global_pool,
min_base_depth=1,
depth_multiplier=depth_multiplier,
scope=scope_test)
for block in ['block1', 'block2', 'block3', 'block4']:
block_name_base = scope_base + '/' + block
block_name_test = scope_test + '/' + block
self.assertTrue(block_name_base in end_points_base)
self.assertTrue(block_name_test in end_points_test)
self.assertEqual(
len(end_points_base[block_name_base].get_shape().as_list()), 4)
self.assertEqual(
len(end_points_test[block_name_test].get_shape().as_list()), 4)
self.assertListEqual(
end_points_base[block_name_base].get_shape().as_list()[:3],
end_points_test[block_name_test].get_shape().as_list()[:3])
self.assertEqual(
int(depth_multiplier *
end_points_base[block_name_base].get_shape().as_list()[3]),
end_points_test[block_name_test].get_shape().as_list()[3])
def testMinBaseDepth(self):
resnets = [
resnet_v1.resnet_v1_50, resnet_v1.resnet_v1_101,
resnet_v1.resnet_v1_152, resnet_v1.resnet_v1_200
]
resnet_names = [
'resnet_v1_50', 'resnet_v1_101', 'resnet_v1_152', 'resnet_v1_200'
]
for resnet, resnet_name in zip(resnets, resnet_names):
min_base_depth = 5
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = resnet(
inputs,
num_classes,
global_pool=global_pool,
min_base_depth=min_base_depth,
depth_multiplier=0,
scope=resnet_name)
for block in ['block1', 'block2', 'block3', 'block4']:
block_name = resnet_name + '/' + block
self.assertTrue(block_name in end_points)
self.assertEqual(
len(end_points[block_name].get_shape().as_list()), 4)
# The output depth is 4 times base_depth.
depth_expected = min_base_depth * 4
self.assertEqual(
end_points[block_name].get_shape().as_list()[3], depth_expected)
if __name__ == '__main__':
tf.test.main()
| 43.184127 | 80 | 0.600015 |
793ed6a49c44650d0cc4a0658c68d9bd8bab9f5a | 931 | py | Python | scipy/cluster/setup.py | aragilar/scipy | cc3bfa91f662999996cd1cfdec4465bb9943ab1c | [
"BSD-3-Clause"
] | 5 | 2016-01-25T16:46:43.000Z | 2021-11-04T04:25:09.000Z | scipy/cluster/setup.py | aragilar/scipy | cc3bfa91f662999996cd1cfdec4465bb9943ab1c | [
"BSD-3-Clause"
] | null | null | null | scipy/cluster/setup.py | aragilar/scipy | cc3bfa91f662999996cd1cfdec4465bb9943ab1c | [
"BSD-3-Clause"
] | 2 | 2015-06-05T03:34:50.000Z | 2021-10-31T08:50:49.000Z | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
import sys
if sys.version_info[0] >= 3:
DEFINE_MACROS = [("SCIPY_PY3K", None)]
else:
DEFINE_MACROS = []
def configuration(parent_package='', top_path=None):
from numpy.distutils.system_info import get_info
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
blas_opt = get_info('lapack_opt')
config.add_data_dir('tests')
config.add_extension('_vq',
sources=[('_vq.c')],
include_dirs=[get_numpy_include_dirs()],
extra_info=blas_opt)
config.add_extension('_hierarchy',
sources=[('_hierarchy.c')],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| 25.861111 | 79 | 0.703545 |
793ed801d0f28d84366ed4dfe82f4bf6cc97b82d | 24,488 | py | Python | src/sqlfluff/rules/L016.py | fawix/sqlfluff | d9f64445b60a4746619c3cb577cf157e632a4862 | [
"MIT"
] | 1 | 2022-03-03T02:29:11.000Z | 2022-03-03T02:29:11.000Z | src/sqlfluff/rules/L016.py | clairetaylor352/sqlfluff | 62900332228db323da323ce20df0c5e17ba9fcbf | [
"MIT"
] | null | null | null | src/sqlfluff/rules/L016.py | clairetaylor352/sqlfluff | 62900332228db323da323ce20df0c5e17ba9fcbf | [
"MIT"
] | null | null | null | """Implementation of Rule L016."""
from typing import cast, List, Optional, Sequence, Tuple
from sqlfluff.core.parser import (
BaseSegment,
NewlineSegment,
RawSegment,
WhitespaceSegment,
)
from sqlfluff.core.rules.base import LintFix, LintResult, RuleContext
from sqlfluff.core.rules.functional import sp
from sqlfluff.core.rules.doc_decorators import (
document_fix_compatible,
document_configuration,
)
from sqlfluff.rules.L003 import Rule_L003
@document_fix_compatible
@document_configuration
class Rule_L016(Rule_L003):
"""Line is too long."""
_check_docstring = False
config_keywords = [
"max_line_length",
"tab_space_size",
"indent_unit",
"ignore_comment_lines",
"ignore_comment_clauses",
]
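    # A hedged configuration sketch (illustrative .sqlfluff snippet, not taken
    # from the docs; the exact section layout can differ between sqlfluff
    # versions):
    #
    #   [sqlfluff:rules]
    #   max_line_length = 100
    #
    #   [sqlfluff:rules:L016]
    #   ignore_comment_lines = True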
def _eval_line_for_breaks(self, segments: List[RawSegment]) -> List[LintFix]:
"""Evaluate the line for break points.
We split the line into a few particular sections:
- The indent (all the whitespace up to this point)
- Content (which doesn't have whitespace at the start or end)
- Breakpoint (which contains Indent/Dedent and potential
whitespace). NB: If multiple indent/dedent sections share
a breakpoint, then they will occupy the SAME one, so that
dealing with whitespace post-split is easier.
- Pausepoint (which is a comma, potentially surrounded by
whitespace). This is for potential list splitting.
Once split, we'll use a separate method to work out what
combinations make most sense for reflow.
"""
chunk_buff = []
indent_section = None
class Section:
def __init__(
self,
segments: Sequence[RawSegment],
role: str,
indent_balance: int,
indent_impulse: Optional[int] = None,
):
self.segments = segments
self.role = role
self.indent_balance = indent_balance
self.indent_impulse: int = indent_impulse or 0
def __repr__(self):
return (
"<Section @ {pos}: {role} [{indent_balance}:{indent_impulse}]. "
"{segments!r}>".format(
role=self.role,
indent_balance=self.indent_balance,
indent_impulse=self.indent_impulse,
segments="".join(elem.raw for elem in self.segments),
pos=self.segments[0].get_start_point_marker()
if self.segments
else "",
)
)
@property
def raw(self) -> str:
return "".join(seg.raw for seg in self.segments)
@staticmethod
def find_segment_at(segments, loc: Tuple[int, int]) -> RawSegment:
for seg in segments:
if not seg.is_meta and seg.pos_marker.working_loc == loc:
return seg
raise ValueError("Segment not found") # pragma: no cover
def generate_fixes_to_coerce(
self,
segments: List[RawSegment],
indent_section: "Section",
crawler: Rule_L016,
indent: int,
) -> List[LintFix]:
"""Generate a list of fixes to create a break at this point.
The `segments` argument is necessary to extract anchors
from the existing segments.
"""
fixes = []
# Generate some sample indents:
unit_indent = crawler._make_indent(
indent_unit=crawler.indent_unit,
tab_space_size=crawler.tab_space_size,
)
indent_p1 = indent_section.raw + unit_indent
if unit_indent in indent_section.raw:
indent_m1 = indent_section.raw.replace(unit_indent, "", 1)
else:
indent_m1 = indent_section.raw
if indent > 0:
new_indent = indent_p1
elif indent < 0:
new_indent = indent_m1
else:
new_indent = indent_section.raw
create_anchor = self.find_segment_at(
segments, self.segments[-1].get_end_loc()
)
if self.role == "pausepoint":
# Assume that this means there isn't a breakpoint
# and that we'll break with the same indent as the
# existing line.
# NOTE: Deal with commas and binary operators differently here.
# Maybe only deal with commas to start with?
if any(
seg.is_type("binary_operator") for seg in self.segments
): # pragma: no cover
raise NotImplementedError(
"Don't know how to deal with binary operators here yet!!"
)
# Remove any existing whitespace
for elem in self.segments:
if not elem.is_meta and elem.is_type("whitespace"):
fixes.append(LintFix.delete(elem))
# Create a newline and a similar indent
fixes.append(
LintFix.create_before(
create_anchor,
[
NewlineSegment(),
WhitespaceSegment(new_indent),
],
)
)
return fixes
if self.role == "breakpoint":
# Can we determine the required indent just from
# the info in this segment only?
# Remove anything which is already here
for elem in self.segments:
if not elem.is_meta:
fixes.append(LintFix.delete(elem))
# Create a newline, create an indent of the relevant size
fixes.append(
LintFix.create_before(
create_anchor,
[
NewlineSegment(),
WhitespaceSegment(new_indent),
],
)
)
return fixes
raise ValueError(
f"Unexpected break generated at {self}"
) # pragma: no cover
segment_buff: Tuple[RawSegment, ...] = ()
whitespace_buff: Tuple[RawSegment, ...] = ()
indent_impulse = 0
indent_balance = 0
is_pause = False
seg: RawSegment
for seg in segments:
if indent_section is None:
if seg.is_type("whitespace") or seg.is_meta:
whitespace_buff += (seg,)
else:
indent_section = Section(
segments=whitespace_buff,
role="indent",
indent_balance=indent_balance,
)
whitespace_buff = ()
segment_buff = (seg,)
else:
if seg.is_type("whitespace") or seg.is_meta:
whitespace_buff += (seg,)
if seg.is_meta:
indent_impulse += seg.indent_val
else:
# We got something other than whitespace or a meta.
# Have we passed an indent?
if indent_impulse != 0:
# Yes. Bank the section, perhaps also with a content
# section.
if segment_buff:
chunk_buff.append(
Section(
segments=segment_buff,
role="content",
indent_balance=indent_balance,
)
)
segment_buff = ()
# Deal with the whitespace
chunk_buff.append(
Section(
segments=whitespace_buff,
role="breakpoint",
indent_balance=indent_balance,
indent_impulse=indent_impulse,
)
)
whitespace_buff = ()
indent_balance += indent_impulse
indent_impulse = 0
# Did we think we were in a pause?
                    # TODO: Re-enable binary operator breaks at some point in the future.
if is_pause:
# We need to end the comma/operator
# (taking any whitespace with it).
chunk_buff.append(
Section(
segments=segment_buff + whitespace_buff,
role="pausepoint",
indent_balance=indent_balance,
)
)
# Start the segment buffer off with this section.
whitespace_buff = ()
segment_buff = (seg,)
is_pause = False
else:
# We're not in a pause (or not in a pause yet)
if seg.name == "comma": # or seg.is_type('binary_operator')
if segment_buff:
# End the previous section, start a comma/operator.
# Any whitespace is added to the segment
# buff to go with the comma.
chunk_buff.append(
Section(
segments=segment_buff,
role="content",
indent_balance=indent_balance,
)
)
segment_buff = ()
# Having a double comma should be impossible
# but let's deal with that case regardless.
segment_buff += whitespace_buff + (seg,)
whitespace_buff = ()
is_pause = True
else:
# Not in a pause, it's not a comma, were in
# some content.
segment_buff += whitespace_buff + (seg,)
whitespace_buff = ()
# We're at the end, do we have anything left?
if is_pause:
role = "pausepoint"
elif segment_buff:
role = "content"
elif indent_impulse: # pragma: no cover
role = "breakpoint"
else:
# This can happen, e.g. with a long template line. Treat it as
# unfixable.
return []
chunk_buff.append(
Section(
segments=segment_buff + whitespace_buff,
role=role,
indent_balance=indent_balance,
)
)
self.logger.info("Sections:")
for idx, sec in enumerate(chunk_buff):
self.logger.info(f" {idx}: {sec!r}")
# How do we prioritise where to work?
# First, do we ever go through a negative breakpoint?
lowest_bal = min(sec.indent_balance for sec in chunk_buff)
split_at = [] # split_at is probably going to be a list.
if lowest_bal < 0:
for sec in chunk_buff:
if sec.indent_balance == 0 and sec.indent_impulse < 0:
split_at = [(sec, -1)]
break
# Assuming we never go negative, we'll either use a pause
# point in the base indent balance, or we'll split out
# a section or two using the lowest breakpoints.
else:
            # Look for low-level pauses. Additionally, ignore a pause
            # if it's a comma at the end of the line, since it's
            # useless for splitting.
pauses = [
sec
for sec in chunk_buff
if sec.role == "pausepoint" and sec.indent_balance == 0
# Not the last chunk
and sec is not chunk_buff[-1]
]
if any(pauses):
split_at = [(pause, 0) for pause in pauses]
else:
# No pauses and no negatives. We should extract
# a subsection using the breakpoints.
# We'll definitely have an up. It's possible that the *down*
# might not be on this line, so we have to allow for that case.
upbreaks = [
sec
for sec in chunk_buff
if sec.role == "breakpoint"
and sec.indent_balance == 0
and sec.indent_impulse > 0
]
if not upbreaks:
# No upbreaks?!
# abort
return []
# First up break
split_at = [(upbreaks[0], 1)]
downbreaks = [
sec
for sec in chunk_buff
if sec.role == "breakpoint"
and sec.indent_balance + sec.indent_impulse == 0
and sec.indent_impulse < 0
]
# First down break where we reach the base
if downbreaks:
split_at.append((downbreaks[0], 0))
# If no downbreaks then the corresponding downbreak isn't on this line.
self.logger.info("Split at: %s", split_at)
fixes = []
for split, indent in split_at:
if split.segments:
assert indent_section
fixes += split.generate_fixes_to_coerce(
segments, indent_section, self, indent
)
self.logger.info("Fixes: %s", fixes)
return fixes
@staticmethod
def _gen_line_so_far(raw_stack: Tuple[RawSegment, ...]) -> List[RawSegment]:
"""Work out from the raw stack what the elements on this line are.
Returns:
:obj:`list` of segments
"""
working_buff: List[RawSegment] = []
idx = -1
while True:
if len(raw_stack) >= abs(idx):
s = raw_stack[idx]
if s.name == "newline":
break
else:
working_buff.insert(0, s)
idx -= 1
else:
break # pragma: no cover
return working_buff
@classmethod
def _compute_segment_length(cls, segment: BaseSegment) -> int:
if segment.is_type("newline"):
# Generally, we won't see newlines, but if we do, simply ignore
# them. Rationale: The intent of this rule is to enforce maximum
# line length, and newlines don't make lines longer.
return 0
if "\n" in segment.pos_marker.source_str():
# Similarly we shouldn't see newlines in source segments
# However for templated loops it's often not possible to
# accurately calculate the segments. These will be caught by
# the first iteration of the loop (which is non-templated)
# so doesn't suffer from the same bug, so we can ignore these
return 0
# Compute the length of this segments in SOURCE space (before template
# expansion).
slice_length = (
segment.pos_marker.source_slice.stop - segment.pos_marker.source_slice.start
)
if slice_length:
return slice_length
else:
# If a segment did not originate from the original source, its slice
# length slice length will be zero. This occurs, for example, when
# other lint rules add indentation or other whitespace. In that
# case, compute the length of its contents.
return len(segment.raw)
def _compute_source_length(
self, segments: Sequence[BaseSegment], memory: dict
) -> int:
line_len = 0
seen_slices = set()
for segment in segments:
if self.ignore_comment_clauses and segment in memory["comment_clauses"]:
continue
slice = (
segment.pos_marker.source_slice.start,
segment.pos_marker.source_slice.stop,
)
# Often, a single templated area of a source file will expand to
# multiple SQL tokens. Here, we use a set to avoid double counting
# the length of that text. For example, in BigQuery, we might
# see this source query:
#
# SELECT user_id
# FROM `{{bi_ecommerce_orders}}` {{table_at_job_start}}
#
# where 'table_at_job_start' is defined as:
# "FOR SYSTEM_TIME AS OF CAST('2021-03-02T01:22:59+00:00' AS TIMESTAMP)"
#
# So this one substitution results in roughly 10 segments (one per
# word or bit of punctuation). Each of these would have the same
# source slice, and if we didn't correct for this, we'd count the
# length of {{bi_ecommerce_orders}} roughly 10 times, resulting in
# vast overcount of the source length.
#
# :TRICKY: New segments (i.e. those introduced by earlier fixes)
# have empty source slices. We definitely want to count the length
# of these segments. We can be sure they aren't the tricky templated
# segment case described above because new segments are never templated
# (because "sqlfluff fix" produced them, not the templater!).
if (
slice[0] == slice[1] and not segment.is_meta
) or slice not in seen_slices:
seen_slices.add(slice)
line_len += self._compute_segment_length(segment)
return line_len
def _eval(self, context: RuleContext) -> Optional[LintResult]:
"""Line is too long.
This only triggers on newline segments, evaluating the whole line.
The detection is simple, the fixing is much trickier.
"""
# Config type hints
self.max_line_length: int
self.ignore_comment_lines: bool
self.ignore_comment_clauses: bool
if not context.memory:
memory: dict = {"comment_clauses": set()}
else:
memory = context.memory
if context.segment.name == "newline":
# iterate to buffer the whole line up to this point
this_line = self._gen_line_so_far(context.raw_stack)
else:
if self.ignore_comment_clauses and context.segment.is_type(
"comment_clause", "comment_equals_clause"
):
comment_segment = context.functional.segment.children().first(
sp.is_name("quoted_literal")
)
if comment_segment:
memory["comment_clauses"].add(comment_segment.get())
# Otherwise we're all good
return LintResult(memory=memory)
# Now we can work out the line length and deal with the content
line_len = self._compute_source_length(this_line, memory)
if line_len > self.max_line_length:
# Problem, we'll be reporting a violation. The
# question is, can we fix it?
# We'll need the indent, so let's get it for fixing.
line_indent = []
for s in this_line:
if s.name == "whitespace":
line_indent.append(s)
else:
break
# Don't even attempt to handle template placeholders as gets
# complicated if logic changes (e.g. moving for loops). Most of
# these long lines will likely be single line Jinja comments.
# They will remain as unfixable.
if this_line[-1].type == "placeholder":
self.logger.info("Unfixable template segment: %s", this_line[-1])
return LintResult(anchor=context.segment, memory=memory)
# Does the line end in an inline comment that we can move back?
if this_line[-1].name == "inline_comment":
                # Is this line JUST a comment (with optional preceding
                # whitespace)? If so, the user will have to fix it themselves.
if len(this_line) == 1 or all(
elem.name == "whitespace" or elem.is_meta for elem in this_line[:-1]
):
self.logger.info(
"Unfixable inline comment, alone on line: %s", this_line[-1]
)
if self.ignore_comment_lines:
return LintResult(memory=memory)
else:
return LintResult(anchor=context.segment, memory=memory)
self.logger.info(
"Attempting move of inline comment at end of line: %s",
this_line[-1],
)
# Set up to delete the original comment and the preceding whitespace
delete_buffer = [LintFix.delete(this_line[-1])]
idx = -2
while True:
if (
len(this_line) >= abs(idx)
and this_line[idx].name == "whitespace"
):
delete_buffer.append(LintFix.delete(this_line[idx]))
idx -= 1
else:
break # pragma: no cover
create_elements = line_indent + [
this_line[-1],
cast(RawSegment, context.segment),
]
if (
self._compute_source_length(create_elements, memory)
> self.max_line_length
):
# The inline comment is NOT on a line by itself, but even if
# we move it onto a line by itself, it's still too long. In
# this case, the rule should do nothing, otherwise it
# triggers an endless cycle of "fixes" that simply keeps
# adding blank lines.
self.logger.info(
"Unfixable inline comment, too long even on a line by itself: "
"%s",
this_line[-1],
)
if self.ignore_comment_lines:
return LintResult(memory=memory)
else:
return LintResult(anchor=context.segment, memory=memory)
# Create a newline before this one with the existing comment, an
# identical indent AND a terminating newline, copied from the current
# target segment.
create_buffer = [LintFix.create_before(this_line[0], create_elements)]
return LintResult(
anchor=context.segment,
fixes=delete_buffer + create_buffer,
memory=memory,
)
fixes = self._eval_line_for_breaks(this_line)
if fixes:
return LintResult(anchor=context.segment, fixes=fixes, memory=memory)
return LintResult(anchor=context.segment, memory=memory)
return LintResult(memory=memory)
| 41.717206 | 88 | 0.493834 |
793ed9cd720ccd6ce3740512d6e6ac8592822ddd | 6,960 | py | Python | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_fmupdate_analyzer_virusreport.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | [
"MIT"
] | null | null | null | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_fmupdate_analyzer_virusreport.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | [
"MIT"
] | null | null | null | intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_fmupdate_analyzer_virusreport.py | Stienvdh/statrick | 7b092fc42171e226718a70a285a4b323f2f395ad | [
"MIT"
] | null | null | null | #!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_fmupdate_analyzer_virusreport
short_description: Send virus detection notification to FortiGuard.
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
- Running in workspace locking mode is supported in this FortiManager module, the top
level parameters workspace_locking_adom and workspace_locking_timeout help do the work.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
- Normally, running one module can fail when a non-zero rc is returned. you can also override
the conditions to fail or succeed with parameters rc_failed and rc_succeeded
options:
bypass_validation:
description: only set to True when module schema diffs with FortiManager API structure, module continues to execute without validating parameters
required: false
type: bool
default: false
workspace_locking_adom:
description: the adom to lock for FortiManager running in workspace mode, the value can be global and others including root
required: false
type: str
workspace_locking_timeout:
description: the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
state:
description: the directive to create, update or delete an object
type: str
required: true
choices:
- present
- absent
rc_succeeded:
description: the rc codes list with which the conditions to succeed will be overriden
type: list
required: false
rc_failed:
description: the rc codes list with which the conditions to fail will be overriden
type: list
required: false
fmupdate_analyzer_virusreport:
description: the top level parameters set
required: false
type: dict
suboptions:
status:
type: str
default: 'enable'
description:
- 'Enable/disable sending virus detection notification to FortiGuard (default = enable).'
- 'disable - Disable setting.'
- 'enable - Enable setting.'
choices:
- 'disable'
- 'enable'
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: Send virus detection notification to FortiGuard.
fmgr_fmupdate_analyzer_virusreport:
bypass_validation: False
workspace_locking_adom: <value in [global, custom adom including root]>
workspace_locking_timeout: 300
rc_succeeded: [0, -2, -3, ...]
rc_failed: [-2, -3, ...]
fmupdate_analyzer_virusreport:
status: <value in [disable, enable]>
'''
RETURN = '''
request_url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request
returned: always
type: int
sample: 0
response_message:
description: The descriptive message of the api response
type: str
returned: always
sample: OK.
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
jrpc_urls = [
'/cli/global/fmupdate/analyzer/virusreport'
]
perobject_jrpc_urls = [
'/cli/global/fmupdate/analyzer/virusreport/{virusreport}'
]
url_params = []
module_primary_key = None
module_arg_spec = {
'bypass_validation': {
'type': 'bool',
'required': False,
'default': False
},
'workspace_locking_adom': {
'type': 'str',
'required': False
},
'workspace_locking_timeout': {
'type': 'int',
'required': False,
'default': 300
},
'rc_succeeded': {
'required': False,
'type': 'list'
},
'rc_failed': {
'required': False,
'type': 'list'
},
'fmupdate_analyzer_virusreport': {
'required': False,
'type': 'dict',
'options': {
'status': {
'required': False,
'choices': [
'disable',
'enable'
],
'type': 'str'
}
}
}
}
params_validation_blob = []
check_galaxy_version(module_arg_spec)
module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'fmupdate_analyzer_virusreport'),
supports_check_mode=False)
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, module_primary_key, url_params, module, connection, top_level_schema_name='data')
fmgr.validate_parameters(params_validation_blob)
fmgr.process_partial_curd()
else:
module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
module.exit_json(meta=module.params)
if __name__ == '__main__':
main()
| 33.142857 | 153 | 0.642098 |
793eda0f9a127931b9c011fad2ce0f76e6cf5b9b | 117 | py | Python | PyDarkLogic/MainDarkLogic/enumfun.py | BlackWalker01/BlackLogic | 5f49956a0d4ee7a10c35e006d340ba58ca692078 | [
"MIT"
] | 6 | 2020-04-21T22:25:02.000Z | 2021-03-05T17:44:24.000Z | PyDarkLogic/MainDarkLogic/enumfun.py | BlackWalker01/BlackLogic | 5f49956a0d4ee7a10c35e006d340ba58ca692078 | [
"MIT"
] | 1 | 2020-08-04T22:35:39.000Z | 2020-08-09T12:40:46.000Z | PyDarkLogic/MainDarkLogic/enumfun.py | BlackWalker01/BlackLogic | 5f49956a0d4ee7a10c35e006d340ba58ca692078 | [
"MIT"
] | null | null | null | from enum import Enum

class EnumFun(Enum):
    NONE = 0
    GET_ACTION = 1
    PUSH_ACTION = 2
    POP_ACTION = 3
    HELP = 4 | 14.625 | 21 | 0.666667 |
793edba550f650680b3a3f8afdddb252ef80b93f | 2,265 | py | Python | gui.py | sguldemond/instagram-downloader | ca5228b695f4bab42a2dc9b7250ee3fa320e7f20 | [
"MIT"
] | null | null | null | gui.py | sguldemond/instagram-downloader | ca5228b695f4bab42a2dc9b7250ee3fa320e7f20 | [
"MIT"
] | null | null | null | gui.py | sguldemond/instagram-downloader | ca5228b695f4bab42a2dc9b7250ee3fa320e7f20 | [
"MIT"
] | null | null | null | from tkinter import *
from tkinter import filedialog
import instagram
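# NOTE: `instagram` is this project's local helper module (assumed, from the
# calls below, to expose login(username, password) and get_image(url, folder));
# it is not the PyPI package of the same name.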
class Window(Frame):
def __init__(self, master=None):
Frame.__init__(self, master)
self.master = master
self.init_window()
self.folder_path = StringVar()
def init_window(self):
self.master.title("Shanon")
self.pack(fill=BOTH, expand=1)
self.y_loc = [[0, 20]]
y_loc = self.y_loc
for x in range(6):
y_loc.append([int(y_loc[x][0]) + 50, int(y_loc[x][1]) + 50])
print(y_loc)
Label(self, text="Username:").place(x=10, y=y_loc[0][0])
self.username_input = Entry(self, width=40)
self.username_input.place(x=10, y=y_loc[0][1])
Label(self, text="Password:").place(x=10, y=y_loc[1][0])
self.password_input = Entry(self, show='*', width=40)
self.password_input.place(x=10, y=y_loc[1][1])
login_button = Button(self, text="Login", width=10, command=self.login)
login_button.place(x=10, y=y_loc[2][0]+10)
Label(self, text="Instagram image URL:").place(x=10, y=y_loc[3][0])
self.url_input = Entry(self, width=40)
self.url_input.place(x=10, y=y_loc[3][1])
Label(self, text="Download location:").place(x=10, y=y_loc[4][0])
self.download_location = Button(self, text="Browse", width=40, command=self.browse)
self.download_location.place(x=10, y=y_loc[4][1])
download_button = Button(self, text="Download", width=10, command=self.download)
download_button.place(x=10, y=y_loc[6][0]+10)
quit_button = Button(self, text="Quit", width=10, command=self.client_exit)
quit_button.place(x=280, y=y_loc[6][0]+10)
def browse(self):
folder = filedialog.askdirectory()
self.folder_path.set(folder)
Label(self, text=self.folder_path.get() + '/').place(x=10, y=self.y_loc[5][0])
print(self.folder_path.get())
def login(self):
instagram.login(self.username_input.get(), self.password_input.get())
def download(self):
instagram.get_image(self.url_input.get(), self.folder_path.get() + '/')
def client_exit(self):
exit()
root = Tk()
root.geometry("400x400")
app = Window(root)
root.mainloop() | 31.901408 | 91 | 0.618985 |
793edc702d9d7ec25abc0c2f0ab6d8894b291c10 | 1,414 | py | Python | src/joint_embedding/metrics/ari/test.py | hhuuggoo/neurips2021_multimodal_viash | f17af09a34ecac6309bcd7b5f8f5122e09318e0c | [
"MIT"
] | null | null | null | src/joint_embedding/metrics/ari/test.py | hhuuggoo/neurips2021_multimodal_viash | f17af09a34ecac6309bcd7b5f8f5122e09318e0c | [
"MIT"
] | null | null | null | src/joint_embedding/metrics/ari/test.py | hhuuggoo/neurips2021_multimodal_viash | f17af09a34ecac6309bcd7b5f8f5122e09318e0c | [
"MIT"
] | null | null | null | from os import path
import subprocess
import anndata as ad
# import pandas as pd
import numpy as np
np.random.seed(42)
metric = 'ari'
# metric_file = metric + '.tsv'
metric_file = metric + '.h5ad'
print(">> Running script")
out = subprocess.check_output([
"./" + metric,
"--input_prediction", 'resources_test/joint_embedding/test_resource.prediction.h5ad',
"--input_solution", 'resources_test/joint_embedding/test_resource.solution.h5ad',
"--output", metric_file
]).decode("utf-8")
print(">> Checking whether file exists")
assert path.exists(metric_file)
# result = pd.read_table(metric_file)
result = ad.read_h5ad(metric_file)
sol = ad.read_h5ad('resources_test/joint_embedding/test_resource.solution.h5ad')
pred = ad.read_h5ad('resources_test/joint_embedding/test_resource.prediction.h5ad')
# print(">> Check that score makes sense")
# assert result.shape == (1, 4)
# score = result.loc[0, 'value']
print(">> Check contents of result.uns")
assert 'dataset_id' in result.uns
assert result.uns['dataset_id'] == sol.uns['dataset_id']
assert 'method_id' in result.uns
assert result.uns['method_id'] == pred.uns['method_id']
assert 'metric_ids' in result.uns
assert result.uns['metric_ids'] == [metric]
assert 'metric_values' in result.uns
score = result.uns['metric_values'][0]
print(score)
assert 0 <= score <= 1
assert score == 0.336331837241985
print(">> All tests passed successfully")
| 28.857143 | 89 | 0.736209 |
793edcd6ac1f1bffd01513eccbe59ffd4cea18e4 | 607 | py | Python | test-framework/test-suites/integration/tests/list/test_list_host_profile.py | knutsonchris/stacki | 33087dd5fa311984a66ccecfeee6f9c2c25f665d | [
"BSD-3-Clause"
] | null | null | null | test-framework/test-suites/integration/tests/list/test_list_host_profile.py | knutsonchris/stacki | 33087dd5fa311984a66ccecfeee6f9c2c25f665d | [
"BSD-3-Clause"
] | null | null | null | test-framework/test-suites/integration/tests/list/test_list_host_profile.py | knutsonchris/stacki | 33087dd5fa311984a66ccecfeee6f9c2c25f665d | [
"BSD-3-Clause"
] | null | null | null | import xml.etree.ElementTree as ET
def test_list_host_profile(host, add_host, revert_export_stack_carts):
result = host.run('stack list host profile backend-0-0')
assert result.rc == 0
# Check if this is an actual XML output that can be parsed
root = ET.fromstring(result.stdout)
# Check for a few expected tags and attributes
# This could be more and more variable as we go deeper, so I don't check a lot.
assert root.tag == "profile"
assert root.attrib == {'type': 'native'}
for child in root:
assert child.tag == "chapter"
for grandchild in child:
assert grandchild.tag == "section"
| 30.35 | 80 | 0.729819 |
793edd6ce6a150d894873a3fbf08ea68f8d0838a | 1,004 | py | Python | kubernetes/test/test_v1_object_field_selector.py | woqer/python | 3a6fe8231cefe1fa39a0a69d4b2f33044ab32745 | [
"Apache-2.0"
] | 1 | 2019-07-12T05:38:06.000Z | 2019-07-12T05:38:06.000Z | kubernetes/test/test_v1_object_field_selector.py | woqer/python | 3a6fe8231cefe1fa39a0a69d4b2f33044ab32745 | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_object_field_selector.py | woqer/python | 3a6fe8231cefe1fa39a0a69d4b2f33044ab32745 | [
"Apache-2.0"
] | 1 | 2021-05-18T12:25:56.000Z | 2021-05-18T12:25:56.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.11.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_object_field_selector import V1ObjectFieldSelector
class TestV1ObjectFieldSelector(unittest.TestCase):
""" V1ObjectFieldSelector unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1ObjectFieldSelector(self):
"""
Test V1ObjectFieldSelector
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1_object_field_selector.V1ObjectFieldSelector()
pass
if __name__ == '__main__':
unittest.main()
| 22.311111 | 105 | 0.721116 |
793eddb3c6c93046599a11d56d962f45dfba2dce | 730 | py | Python | PythonCrashCourse/13alien_invasion/settings.py | dzylikecode/Python_Tutorial | bff425b11d6eeaa5733c1c710a570f83c52e4d97 | [
"MIT"
] | null | null | null | PythonCrashCourse/13alien_invasion/settings.py | dzylikecode/Python_Tutorial | bff425b11d6eeaa5733c1c710a570f83c52e4d97 | [
"MIT"
] | null | null | null | PythonCrashCourse/13alien_invasion/settings.py | dzylikecode/Python_Tutorial | bff425b11d6eeaa5733c1c710a570f83c52e4d97 | [
"MIT"
] | null | null | null | """
Each time we add new functionality to the game, we will usually
introduce some new settings as well. Here we write a module named
settings containing a class named Settings, used to store all of the
settings in one place so we don't scatter them throughout the code.
That way we can pass around a single settings object instead of many
separate settings, which also keeps function calls simpler and makes
the game's appearance easier to modify as the project grows: to change
the game, we only tweak some values in settings.py instead of hunting
for settings spread across the files.
"""
class Settings():
"""store the game settings"""
def __init__(self):
"""Initializes the Settings of the game"""
# screen settings
self.screen_width = 1200
self.screen_height = 800
self.bg_color = (230, 230, 230)
# ship settings
self.ship_speed_factor = 1.5
# bullet settings
self.bullet_speed_factor = 1
self.bullet_width = 3
self.bullet_height = 15
self.bullet_color = (60, 60, 60)
self.bullets_allowed = 3
| 26.071429 | 50 | 0.661644 |
793ede887610b810b96c819eb446ec74e6cd6f95 | 2,194 | py | Python | camera/cam_test.py | robot-ronny/rr-raspbian | 08d08797cab752dcf8a0130ca0ba1abe864c1d70 | [
"MIT"
] | 1 | 2019-04-03T19:44:57.000Z | 2019-04-03T19:44:57.000Z | camera/cam_test.py | robot-ronny/rr-raspbian | 08d08797cab752dcf8a0130ca0ba1abe864c1d70 | [
"MIT"
] | null | null | null | camera/cam_test.py | robot-ronny/rr-raspbian | 08d08797cab752dcf8a0130ca0ba1abe864c1d70 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time
import cv2
import numpy as np
import click
import click_log
import logging
from imutils.video import VideoStream
logging.basicConfig(format='%(asctime)s %(message)s')
def nothing(x):
pass
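# Note: OpenCV stores hue in a 0-179 range (8-bit HSV), so hue trackbar values
# above 179 select no additional colors; saturation and value span 0-255.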
@click.command()
@click.option('--video', required=True, help='Video stream number', default=0, type=int)
@click_log.simple_verbosity_option(default='INFO')
def run(video):
logging.info("Process started")
vs = VideoStream(src=video).start()
cv2.namedWindow("Trackbars")
minHSV = np.array([165, 132, 98])
maxHSV = np.array([195, 255, 255])
cv2.createTrackbar("minH", "Trackbars", 0, 255, nothing)
cv2.createTrackbar("minS", "Trackbars", 0, 255, nothing)
cv2.createTrackbar("minV", "Trackbars", 0, 255, nothing)
cv2.setTrackbarPos("minH", "Trackbars", minHSV[0])
cv2.setTrackbarPos("minS", "Trackbars", minHSV[1])
cv2.setTrackbarPos("minV", "Trackbars", minHSV[2])
cv2.createTrackbar("maxH", "Trackbars", 0, 255, nothing)
cv2.setTrackbarPos("maxH", "Trackbars", maxHSV[0])
time.sleep(2.0)
logging.info('Loop start')
counter = 0
while True:
logging.info("Frame read")
time.sleep(0.05)
image = vs.read()
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
minH = cv2.getTrackbarPos("minH", "Trackbars")
minS = cv2.getTrackbarPos("minS", "Trackbars")
minV = cv2.getTrackbarPos("minV", "Trackbars")
maxH = cv2.getTrackbarPos("maxH", "Trackbars")
lowerLimit = np.uint8([minH, minS, minV])
upperLimit = np.uint8([maxH, 255, 255])
mask = cv2.inRange(hsv, lowerLimit, upperLimit)
result = cv2.bitwise_and(image , image , mask=mask)
cv2.imshow("frame", image)
cv2.imshow("mask", mask)
cv2.imshow("result", result)
key = cv2.waitKey(1)
if key == 27:
break
vs.stop()
cv2.destroyAllWindows()
def main():
run()
# try:
# run()
# except KeyboardInterrupt:
# pass
# except Exception as e:
# logging.error(e)
# sys.exit(1)
if __name__ == '__main__':
main()
| 22.161616 | 88 | 0.616226 |
793edf2c093d692ef3d97ac9bd44d72b1be4add8 | 2,405 | py | Python | pdf_combine.py | psengupta1973/py-utils | 133d3917e4b9cae46e86c52cacf604f2feb6ac95 | [
"MIT"
] | null | null | null | pdf_combine.py | psengupta1973/py-utils | 133d3917e4b9cae46e86c52cacf604f2feb6ac95 | [
"MIT"
] | null | null | null | pdf_combine.py | psengupta1973/py-utils | 133d3917e4b9cae46e86c52cacf604f2feb6ac95 | [
"MIT"
] | null | null | null | import PyPDF2
def combine(pdf_names, pdf_out):
    # Keep every source file open until the writer has finished: PyPDF2
    # reads page data lazily, so closing a reader's file before write()
    # would corrupt the merged output.
    open_files = []
    pdfWriter = PyPDF2.PdfFileWriter()
    for pdf_name in pdf_names:
        pdfFile = open(pdf_name, 'rb')
        open_files.append(pdfFile)
        pdfReader = PyPDF2.PdfFileReader(pdfFile)
        for pageNum in range(pdfReader.numPages):
            pageObj = pdfReader.getPage(pageNum)
            pdfWriter.addPage(pageObj)
    pdfOutputFile = open(pdf_out, 'wb')
    pdfWriter.write(pdfOutputFile)
    # Now it is safe to release all handles (the original version closed
    # only the last source file, leaking the rest).
    for f in open_files:
        f.close()
    pdfOutputFile.close()
def main():
pdf_names = ['DeedOfConv1-p1s1.pdf',
'DeedOfConv1-p1s2.pdf',
'DeedOfConv1-p2s1.pdf',
'DeedOfConv1-p2s2.pdf',
'DeedOfConv1-p3s1.pdf',
'DeedOfConv1-p3s2.pdf',
'DeedOfConv1-p4s1.pdf',
'DeedOfConv1-p4s2.pdf',
'DeedOfConv1-p5s1.pdf',
'DeedOfConv1-p5s2.pdf',
'DeedOfConv1-p6s1.pdf',
'DeedOfConv1-p6s2.pdf',
'DeedOfConv1-p7s1.pdf',
'DeedOfConv1-p7s2.pdf',
'DeedOfConv1-p8s1.pdf',
'DeedOfConv1-p8s2.pdf',
'DeedOfConv1-p9s1.pdf',
'DeedOfConv1-p9s2.pdf',
'DeedOfConv1-p10s1.pdf',
'DeedOfConv1-p10s2.pdf',
'DeedOfConv1-p11s1.pdf',
'DeedOfConv1-p11s2.pdf',
'DeedOfConv1-p12s1.pdf',
'DeedOfConv1-p12s2.pdf',
'DeedOfConv1-p13s1.pdf',
'DeedOfConv1-p13s2.pdf',
'DeedOfConv1-p14s1.pdf',
'DeedOfConv1-p14s2.pdf',
'DeedOfConv1-p15s1.pdf',
'DeedOfConv1-p15s2.pdf',
'DeedOfConv1-p16s1.pdf',
'DeedOfConv1-p16s2.pdf',
'DeedOfConv1-p17s1.pdf',
'DeedOfConv1-p17s2.pdf',
'DeedOfConv1-p18s1.pdf',
'DeedOfConv1-p18s2.pdf',
'DeedOfConv1-p19s1.pdf',
'DeedOfConv1-p19s2.pdf',
'DeedOfConv1-p20s1.pdf',
'DeedOfConv1-p20s2.pdf',
'DeedOfConv1-p21s1.pdf']
pdf_out = 'MergedFiles.pdf'
combine(pdf_names, pdf_out)
if __name__ == '__main__':
    main() | 34.855072 | 50 | 0.490644 |
793edf881288c7a8c3c44f575daffe4083a4c7b4 | 1,251 | py | Python | alphafold/model/prng_test.py | milot-mirdita/alphafold | 8a24cc8d22feb2b7ae9bf78f910ce97e9d4403e8 | [
"Apache-2.0"
] | 45 | 2021-07-18T05:26:21.000Z | 2022-03-15T18:22:48.000Z | alphafold/model/prng_test.py | milot-mirdita/alphafold | 8a24cc8d22feb2b7ae9bf78f910ce97e9d4403e8 | [
"Apache-2.0"
] | 33 | 2021-08-12T14:06:16.000Z | 2022-03-31T18:47:49.000Z | alphafold/model/prng_test.py | milot-mirdita/alphafold | 8a24cc8d22feb2b7ae9bf78f910ce97e9d4403e8 | [
"Apache-2.0"
] | 20 | 2021-07-18T03:34:47.000Z | 2022-02-19T02:51:09.000Z | # Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for prng."""
from absl.testing import absltest
import jax
from alphafold.model import prng
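# A minimal usage sketch (hypothetical, not from the AlphaFold docs): wrap a
# raw key once, then thread fresh sub-keys through stochastic ops so that
# accidental reuse raises an error instead of silently correlating randomness.
#
#     safe_key = prng.SafeKey(jax.random.PRNGKey(0))
#     safe_key, sub_key = safe_key.split()
#     noise = jax.random.normal(sub_key.get(), shape=(4,))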
class PrngTest(absltest.TestCase):
def test_key_reuse(self):
init_key = jax.random.PRNGKey(42)
safe_key = prng.SafeKey(init_key)
_, safe_key = safe_key.split()
raw_key = safe_key.get()
self.assertNotEqual(raw_key[0], init_key[0])
self.assertNotEqual(raw_key[1], init_key[1])
with self.assertRaises(RuntimeError):
safe_key.get()
with self.assertRaises(RuntimeError):
safe_key.split()
with self.assertRaises(RuntimeError):
safe_key.duplicate()
if __name__ == '__main__':
absltest.main()
| 26.0625 | 74 | 0.730616 |
793edfa354035da1517d2dd7910f6ac34d33867c | 4,622 | py | Python | src/lava/lib/dl/slayer/block/cuba.py | PeaBrane/lava-dl | b205b4e0466788c5232ff20497ac0fc433cbccca | [
"BSD-3-Clause"
] | null | null | null | src/lava/lib/dl/slayer/block/cuba.py | PeaBrane/lava-dl | b205b4e0466788c5232ff20497ac0fc433cbccca | [
"BSD-3-Clause"
] | null | null | null | src/lava/lib/dl/slayer/block/cuba.py | PeaBrane/lava-dl | b205b4e0466788c5232ff20497ac0fc433cbccca | [
"BSD-3-Clause"
] | null | null | null | # Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
"""CUBA-LIF layer blocks"""
import torch
from . import base
from ..neuron import cuba
from ..synapse import layer as synapse
from ..axon import Delay
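# A minimal composition sketch (hypothetical network; the cuba.Neuron keyword
# arguments shown are assumptions and should be checked against the installed
# lava-dl version):
#
#     neuron_params = {'threshold': 1.25, 'current_decay': 0.25,
#                      'voltage_decay': 0.03}
#     fc1 = Dense(neuron_params, 200, 256)
#     fc2 = Dense(neuron_params, 256, 10)
#     out_spikes = fc2(fc1(in_spikes))  # in_spikes: (batch, 200, time)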
class AbstractCuba(torch.nn.Module):
"""Abstract block class for Current Based Leaky Integrator neuron. This
    should never be instantiated on its own.
"""
def __init__(self, *args, **kwargs):
super(AbstractCuba, self).__init__(*args, **kwargs)
if self.neuron_params is not None:
self.neuron = cuba.Neuron(**self.neuron_params)
delay = kwargs['delay'] if 'delay' in kwargs.keys() else False
self.delay = Delay(max_delay=62) if delay is True else None
del self.neuron_params
def _doc_from_base(base_doc):
""" """
return base_doc.__doc__.replace(
'Abstract', 'CUBA LIF'
).replace(
'neuron parameter', 'CUBA LIF neuron parameter'
).replace(
'This should never be instantiated on its own.',
'The block is 8 bit quantization ready.'
)
class Input(AbstractCuba, base.AbstractInput):
def __init__(self, *args, **kwargs):
super(Input, self).__init__(*args, **kwargs)
if self.neuron is not None:
self.pre_hook_fx = self.neuron.quantize_8bit
Input.__doc__ = _doc_from_base(base.AbstractInput)
class Flatten(base.AbstractFlatten):
def __init__(self, *args, **kwargs):
super(Flatten, self).__init__(*args, **kwargs)
Flatten.__doc__ = _doc_from_base(base.AbstractFlatten)
class Average(base.AbstractAverage):
def __init__(self, *args, **kwargs):
super(Average, self).__init__(*args, **kwargs)
Average.__doc__ = _doc_from_base(base.AbstractAverage)
class Affine(AbstractCuba, base.AbstractAffine):
def __init__(self, *args, **kwargs):
super(Affine, self).__init__(*args, **kwargs)
self.synapse = synapse.Dense(**self.synapse_params)
if 'pre_hook_fx' not in kwargs.keys():
self.synapse.pre_hook_fx = self.neuron.quantize_8bit
self.neuron._threshold = None
# this disables spike and reset in dynamics
del self.synapse_params
Affine.__doc__ = _doc_from_base(base.AbstractAffine)
class TimeDecimation(base.AbstractTimeDecimation):
def __init__(self, *args, **kwargs):
super(TimeDecimation, self).__init__(*args, **kwargs)
TimeDecimation.__doc__ = _doc_from_base(base.AbstractTimeDecimation)
class Dense(AbstractCuba, base.AbstractDense):
def __init__(self, *args, **kwargs):
super(Dense, self).__init__(*args, **kwargs)
self.synapse = synapse.Dense(**self.synapse_params)
if 'pre_hook_fx' not in kwargs.keys():
self.synapse.pre_hook_fx = self.neuron.quantize_8bit
del self.synapse_params
Dense.__doc__ = _doc_from_base(base.AbstractDense)
class Conv(AbstractCuba, base.AbstractConv):
def __init__(self, *args, **kwargs):
super(Conv, self).__init__(*args, **kwargs)
self.synapse = synapse.Conv(**self.synapse_params)
if 'pre_hook_fx' not in kwargs.keys():
self.synapse.pre_hook_fx = self.neuron.quantize_8bit
del self.synapse_params
Conv.__doc__ = _doc_from_base(base.AbstractConv)
class Pool(AbstractCuba, base.AbstractPool):
def __init__(self, *args, **kwargs):
super(Pool, self).__init__(*args, **kwargs)
self.synapse = synapse.Pool(**self.synapse_params)
if 'pre_hook_fx' not in kwargs.keys():
self.synapse.pre_hook_fx = self.neuron.quantize_8bit
del self.synapse_params
Pool.__doc__ = _doc_from_base(base.AbstractPool)
class KWTA(AbstractCuba, base.AbstractKWTA):
def __init__(self, *args, **kwargs):
super(KWTA, self).__init__(*args, **kwargs)
self.synapse = synapse.Dense(**self.synapse_params)
if 'pre_hook_fx' not in kwargs.keys():
self.synapse.pre_hook_fx = self.neuron.quantize_8bit
del self.synapse_params
KWTA.__doc__ = _doc_from_base(base.AbstractKWTA)
class Recurrent(AbstractCuba, base.AbstractRecurrent):
def __init__(self, *args, **kwargs):
super(Recurrent, self).__init__(*args, **kwargs)
self.input_synapse = synapse.Dense(**self.synapse_params)
self.recurrent_synapse = synapse.Dense(**self.recurrent_params)
self.input_synapse.pre_hook_fx = self.neuron.quantize_8bit
self.recurrent_synapse.pre_hook_fx = self.neuron.quantize_8bit
del self.synapse_params
del self.recurrent_params
Recurrent.__doc__ = _doc_from_base(base.AbstractRecurrent)
| 31.442177 | 75 | 0.690394 |
793ee158b06bdb6a19483f1391badad5dae73ac0 | 9,939 | py | Python | virt/ansible-latest/lib/python2.7/site-packages/ansible/modules/cloud/azure/azure_rm_cdnprofile.py | lakhlaifi/RedHat-Ansible | 27c5077cced9d416081fcd5d69ea44bca0317fa4 | [
"Apache-2.0"
] | 1 | 2020-03-29T18:41:01.000Z | 2020-03-29T18:41:01.000Z | ansible/ansible/modules/cloud/azure/azure_rm_cdnprofile.py | SergeyCherepanov/ansible | 875711cd2fd6b783c812241c2ed7a954bf6f670f | [
"MIT"
] | 7 | 2020-09-07T17:27:56.000Z | 2022-03-02T06:25:46.000Z | ansible/ansible/modules/cloud/azure/azure_rm_cdnprofile.py | SergeyCherepanov/ansible | 875711cd2fd6b783c812241c2ed7a954bf6f670f | [
"MIT"
] | 1 | 2020-03-22T01:04:48.000Z | 2020-03-22T01:04:48.000Z | #!/usr/bin/python
#
# Copyright (c) 2018 Hai Cao, <t-haicao@microsoft.com>, Yunge Zhu <yungez@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_cdnprofile
version_added: "2.8"
short_description: Manage a Azure CDN profile.
description:
- Create, update and delete a Azure CDN profile.
options:
resource_group:
description:
- Name of a resource group where the CDN profile exists or will be created.
required: true
name:
description:
- Name of the CDN profile.
required: true
location:
description:
- Valid azure location. Defaults to location of the resource group.
sku:
description:
- The pricing tier, defines a CDN provider, feature list and rate of the CDN profile.
- Detailed pricing can be find at U(https://azure.microsoft.com/en-us/pricing/details/cdn/)
choices:
- standard_verizon
- premium_verizon
- custom_verizon
- standard_akamai
- standard_chinacdn
- standard_microsoft
state:
description:
- Assert the state of the CDN profile. Use C(present) to create or update a CDN profile and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Hai Cao (@caohai) <t-haicao@microsoft.com>"
- "Yunge Zhu (@yungezz) <yungez@microsoft.com>"
'''
EXAMPLES = '''
- name: Create a CDN profile
azure_rm_cdnprofile:
resource_group: myResourceGroup
name: cdntest
sku: standard_akamai
tags:
testing: testing
- name: Delete the CDN profile
azure_rm_cdnprofile:
resource_group: myResourceGroup
name: cdntest
state: absent
'''
RETURN = '''
id:
description: Current state of the CDN profile
returned: always
type: dict
example:
id: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourcegroups/cdntest/providers/Microsoft.Cdn/profiles/cdntest
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from azure.mgmt.cdn.models import Profile, Sku, ErrorResponseException
from azure.mgmt.cdn import CdnManagementClient
except ImportError:
# This is handled in azure_rm_common
pass
def cdnprofile_to_dict(cdnprofile):
return dict(
id=cdnprofile.id,
name=cdnprofile.name,
type=cdnprofile.type,
location=cdnprofile.location,
sku=cdnprofile.sku.name,
resource_state=cdnprofile.resource_state,
provisioning_state=cdnprofile.provisioning_state,
tags=cdnprofile.tags
)
class AzureRMCdnprofile(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
location=dict(
type='str'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
),
sku=dict(
type='str',
choices=['standard_verizon', 'premium_verizon', 'custom_verizon', 'standard_akamai', 'standard_chinacdn', 'standard_microsoft']
)
)
self.resource_group = None
self.name = None
self.location = None
self.state = None
self.tags = None
self.sku = None
self.cdn_client = None
required_if = [
('state', 'present', ['sku'])
]
self.results = dict(changed=False)
super(AzureRMCdnprofile, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True,
required_if=required_if)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
self.cdn_client = self.get_cdn_client()
to_be_updated = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
self.location = resource_group.location
response = self.get_cdnprofile()
if self.state == 'present':
if not response:
self.log("Need to create the CDN profile")
if not self.check_mode:
new_response = self.create_cdnprofile()
self.results['id'] = new_response['id']
self.results['changed'] = True
else:
self.log('Results : {0}'.format(response))
update_tags, response['tags'] = self.update_tags(response['tags'])
if response['provisioning_state'] == "Succeeded":
if update_tags:
to_be_updated = True
if to_be_updated:
self.log("Need to update the CDN profile")
if not self.check_mode:
new_response = self.update_cdnprofile()
self.results['id'] = new_response['id']
self.results['changed'] = True
elif self.state == 'absent':
if not response:
                self.fail("CDN profile {0} does not exist.".format(self.name))
else:
self.log("Need to delete the CDN profile")
self.results['changed'] = True
if not self.check_mode:
self.delete_cdnprofile()
self.results['id'] = response['id']
return self.results
def create_cdnprofile(self):
'''
        Creates an Azure CDN profile.
:return: deserialized Azure CDN profile instance state dictionary
'''
self.log("Creating the Azure CDN profile instance {0}".format(self.name))
parameters = Profile(
location=self.location,
sku=Sku(name=self.sku),
tags=self.tags
)
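        # A client-generated UUID is attached as the 'x-ms-client-request-id'
        # header below so a failed create can be correlated with Azure-side
        # request logs.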
import uuid
xid = str(uuid.uuid1())
try:
poller = self.cdn_client.profiles.create(self.resource_group,
self.name,
parameters,
custom_headers={'x-ms-client-request-id': xid}
)
response = self.get_poller_result(poller)
return cdnprofile_to_dict(response)
except ErrorResponseException as exc:
self.log('Error attempting to create Azure CDN profile instance.')
self.fail("Error creating Azure CDN profile instance: {0}.\n Request id: {1}".format(exc.message, xid))
def update_cdnprofile(self):
'''
        Updates an Azure CDN profile.
:return: deserialized Azure CDN profile instance state dictionary
'''
self.log("Updating the Azure CDN profile instance {0}".format(self.name))
try:
poller = self.cdn_client.profiles.update(self.resource_group, self.name, self.tags)
response = self.get_poller_result(poller)
return cdnprofile_to_dict(response)
except ErrorResponseException as exc:
self.log('Error attempting to update Azure CDN profile instance.')
self.fail("Error updating Azure CDN profile instance: {0}".format(exc.message))
def delete_cdnprofile(self):
'''
Deletes the specified Azure CDN profile in the specified subscription and resource group.
:return: True
'''
self.log("Deleting the CDN profile {0}".format(self.name))
try:
poller = self.cdn_client.profiles.delete(
self.resource_group, self.name)
self.get_poller_result(poller)
return True
except ErrorResponseException as e:
self.log('Error attempting to delete the CDN profile.')
self.fail("Error deleting the CDN profile: {0}".format(e.message))
return False
def get_cdnprofile(self):
'''
Gets the properties of the specified CDN profile.
:return: deserialized CDN profile state dictionary
'''
self.log(
"Checking if the CDN profile {0} is present".format(self.name))
try:
response = self.cdn_client.profiles.get(self.resource_group, self.name)
self.log("Response : {0}".format(response))
self.log("CDN profile : {0} found".format(response.name))
return cdnprofile_to_dict(response)
except ErrorResponseException:
self.log('Did not find the CDN profile.')
return False
def get_cdn_client(self):
if not self.cdn_client:
self.cdn_client = self.get_mgmt_svc_client(CdnManagementClient,
base_url=self._cloud_environment.endpoints.resource_manager,
api_version='2017-04-02')
return self.cdn_client
def main():
"""Main execution"""
AzureRMCdnprofile()
if __name__ == '__main__':
main()
| 32.586885 | 143 | 0.567763 |
793ee39648762acab709bfb72c2245bd220ca905 | 5,065 | py | Python | userbot/modules/deezload.py | vyprlover/One4uBot | dab5f4ef36ff7cd91bd82487abbd7e4149840030 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/modules/deezload.py | vyprlover/One4uBot | dab5f4ef36ff7cd91bd82487abbd7e4149840030 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/modules/deezload.py | vyprlover/One4uBot | dab5f4ef36ff7cd91bd82487abbd7e4149840030 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# Copyright (C) 2020 The Authors UniBorg (telegram userbot)
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
# requires: deezloader hachoir Pillow
# Ported from UniBorg by AnggaR96s
import deezloader
import os
import shutil
import time
from userbot.events import register
from userbot import CMD_HELP, DEEZER_ARL_TOKEN, TEMP_DOWNLOAD_DIRECTORY
from hachoir.metadata import extractMetadata
from hachoir.parser import createParser
from telethon.tl.types import DocumentAttributeAudio
@register(outgoing=True, pattern=r"^\.deez (.+?|) (FLAC|MP3_320|MP3_256|MP3_128)")
async def _(event):
"""DeezLoader by @An0nimia
Ported for UniBorg by @SpEcHlDe"""
if event.fwd_from:
return
    strings = {
        "name": "DeezLoad",
        "arl_token_cfg_doc": "`ARL Token for Deezer`",
        "invalid_arl_token": "`Please set the required variables for this module`",
        "wrong_cmd_syntax": "`Bruh, now I think how far should we go. Please terminate my Session 🥺`",
        "server_error": "`We're experiencing technical difficulties.`",
        "processing": "`Downloading...`"
    }
ARL_TOKEN = DEEZER_ARL_TOKEN
if ARL_TOKEN is None:
await event.edit(strings["invalid_arl_token"])
return
try:
loader = deezloader.Login(ARL_TOKEN)
except Exception as er:
await event.edit(str(er))
return
temp_dl_path = os.path.join(TEMP_DOWNLOAD_DIRECTORY, str(time.time()))
if not os.path.exists(temp_dl_path):
os.makedirs(temp_dl_path)
required_link = event.pattern_match.group(1)
required_qty = event.pattern_match.group(2)
await event.edit(strings["processing"])
if "spotify" in required_link:
if "track" in required_link:
required_track = loader.download_trackspo(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True
)
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
elif "album" in required_link:
reqd_albums = loader.download_albumspo(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True,
zips=False
)
for required_track in reqd_albums:
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
elif "deezer" in required_link:
if "track" in required_link:
required_track = loader.download_trackdee(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True
)
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
elif "album" in required_link:
reqd_albums = loader.download_albumdee(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True,
zips=False
)
for required_track in reqd_albums:
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
else:
await event.edit(strings["wrong_cmd_syntax"])
async def upload_track(track_location, message):
    # hachoir returns None when the file cannot be parsed, so guard before reading tags
    parser = createParser(track_location)
    metadata = extractMetadata(parser) if parser else None
    duration = 0
    title = ""
    performer = ""
    if metadata is not None:
        if metadata.has("duration"):
            duration = metadata.get("duration").seconds
        if metadata.has("title"):
            title = metadata.get("title")
        if metadata.has("artist"):
            performer = metadata.get("artist")
document_attributes = [
DocumentAttributeAudio(
duration=duration,
voice=False,
title=title,
performer=performer,
waveform=None
)
]
supports_streaming = True
force_document = False
caption_rts = os.path.basename(track_location)
await message.client.send_file(
message.chat_id,
track_location,
caption=caption_rts,
force_document=force_document,
supports_streaming=supports_streaming,
allow_cache=False,
attributes=document_attributes,
)
os.remove(track_location)
CMD_HELP.update({
"deezload":
".deez <spotify/deezer link> **Format Music**"
"\nUsage: Download music from deezer or spotify."
})
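# Usage sketch (not part of the module; assumes the userbot is running and
# DEEZER_ARL_TOKEN is configured):
#   .deez https://open.spotify.com/track/<id> MP3_320
#   .deez https://www.deezer.com/album/<id> FLAC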
| 31.65625 | 102 | 0.616584 |
793ee4a65472f31025359514281013a7d0db957a | 21,225 | py | Python | sdk/python/pulumi_azure_native/servicebus/v20170401/outputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/servicebus/v20170401/outputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/servicebus/v20170401/outputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'ActionResponse',
'CorrelationFilterResponse',
'MessageCountDetailsResponse',
'NWRuleSetIpRulesResponse',
'NWRuleSetVirtualNetworkRulesResponse',
'SBSkuResponse',
'SqlFilterResponse',
'SubnetResponse',
]
@pulumi.output_type
class ActionResponse(dict):
"""
Represents the filter actions which are allowed for the transformation of a message that have been matched by a filter expression.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "compatibilityLevel":
suggest = "compatibility_level"
elif key == "requiresPreprocessing":
suggest = "requires_preprocessing"
elif key == "sqlExpression":
suggest = "sql_expression"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ActionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ActionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ActionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
compatibility_level: Optional[int] = None,
requires_preprocessing: Optional[bool] = None,
sql_expression: Optional[str] = None):
"""
Represents the filter actions which are allowed for the transformation of a message that have been matched by a filter expression.
:param int compatibility_level: This property is reserved for future use. An integer value showing the compatibility level, currently hard-coded to 20.
:param bool requires_preprocessing: Value that indicates whether the rule action requires preprocessing.
:param str sql_expression: SQL expression. e.g. MyProperty='ABC'
"""
if compatibility_level is not None:
pulumi.set(__self__, "compatibility_level", compatibility_level)
if requires_preprocessing is None:
requires_preprocessing = True
if requires_preprocessing is not None:
pulumi.set(__self__, "requires_preprocessing", requires_preprocessing)
if sql_expression is not None:
pulumi.set(__self__, "sql_expression", sql_expression)
@property
@pulumi.getter(name="compatibilityLevel")
def compatibility_level(self) -> Optional[int]:
"""
This property is reserved for future use. An integer value showing the compatibility level, currently hard-coded to 20.
"""
return pulumi.get(self, "compatibility_level")
@property
@pulumi.getter(name="requiresPreprocessing")
def requires_preprocessing(self) -> Optional[bool]:
"""
Value that indicates whether the rule action requires preprocessing.
"""
return pulumi.get(self, "requires_preprocessing")
@property
@pulumi.getter(name="sqlExpression")
def sql_expression(self) -> Optional[str]:
"""
SQL expression. e.g. MyProperty='ABC'
"""
return pulumi.get(self, "sql_expression")
@pulumi.output_type
class CorrelationFilterResponse(dict):
"""
Represents the correlation filter expression.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "contentType":
suggest = "content_type"
elif key == "correlationId":
suggest = "correlation_id"
elif key == "messageId":
suggest = "message_id"
elif key == "replyTo":
suggest = "reply_to"
elif key == "replyToSessionId":
suggest = "reply_to_session_id"
elif key == "requiresPreprocessing":
suggest = "requires_preprocessing"
elif key == "sessionId":
suggest = "session_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CorrelationFilterResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CorrelationFilterResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CorrelationFilterResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
content_type: Optional[str] = None,
correlation_id: Optional[str] = None,
label: Optional[str] = None,
message_id: Optional[str] = None,
properties: Optional[Mapping[str, str]] = None,
reply_to: Optional[str] = None,
reply_to_session_id: Optional[str] = None,
requires_preprocessing: Optional[bool] = None,
session_id: Optional[str] = None,
to: Optional[str] = None):
"""
Represents the correlation filter expression.
:param str content_type: Content type of the message.
:param str correlation_id: Identifier of the correlation.
:param str label: Application specific label.
:param str message_id: Identifier of the message.
:param Mapping[str, str] properties: dictionary object for custom filters
:param str reply_to: Address of the queue to reply to.
:param str reply_to_session_id: Session identifier to reply to.
:param bool requires_preprocessing: Value that indicates whether the rule action requires preprocessing.
:param str session_id: Session identifier.
:param str to: Address to send to.
"""
if content_type is not None:
pulumi.set(__self__, "content_type", content_type)
if correlation_id is not None:
pulumi.set(__self__, "correlation_id", correlation_id)
if label is not None:
pulumi.set(__self__, "label", label)
if message_id is not None:
pulumi.set(__self__, "message_id", message_id)
if properties is not None:
pulumi.set(__self__, "properties", properties)
if reply_to is not None:
pulumi.set(__self__, "reply_to", reply_to)
if reply_to_session_id is not None:
pulumi.set(__self__, "reply_to_session_id", reply_to_session_id)
if requires_preprocessing is None:
requires_preprocessing = True
if requires_preprocessing is not None:
pulumi.set(__self__, "requires_preprocessing", requires_preprocessing)
if session_id is not None:
pulumi.set(__self__, "session_id", session_id)
if to is not None:
pulumi.set(__self__, "to", to)
@property
@pulumi.getter(name="contentType")
def content_type(self) -> Optional[str]:
"""
Content type of the message.
"""
return pulumi.get(self, "content_type")
@property
@pulumi.getter(name="correlationId")
def correlation_id(self) -> Optional[str]:
"""
Identifier of the correlation.
"""
return pulumi.get(self, "correlation_id")
@property
@pulumi.getter
def label(self) -> Optional[str]:
"""
Application specific label.
"""
return pulumi.get(self, "label")
@property
@pulumi.getter(name="messageId")
def message_id(self) -> Optional[str]:
"""
Identifier of the message.
"""
return pulumi.get(self, "message_id")
@property
@pulumi.getter
def properties(self) -> Optional[Mapping[str, str]]:
"""
dictionary object for custom filters
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter(name="replyTo")
def reply_to(self) -> Optional[str]:
"""
Address of the queue to reply to.
"""
return pulumi.get(self, "reply_to")
@property
@pulumi.getter(name="replyToSessionId")
def reply_to_session_id(self) -> Optional[str]:
"""
Session identifier to reply to.
"""
return pulumi.get(self, "reply_to_session_id")
@property
@pulumi.getter(name="requiresPreprocessing")
def requires_preprocessing(self) -> Optional[bool]:
"""
Value that indicates whether the rule action requires preprocessing.
"""
return pulumi.get(self, "requires_preprocessing")
@property
@pulumi.getter(name="sessionId")
def session_id(self) -> Optional[str]:
"""
Session identifier.
"""
return pulumi.get(self, "session_id")
@property
@pulumi.getter
def to(self) -> Optional[str]:
"""
Address to send to.
"""
return pulumi.get(self, "to")
@pulumi.output_type
class MessageCountDetailsResponse(dict):
"""
Message Count Details.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "activeMessageCount":
suggest = "active_message_count"
elif key == "deadLetterMessageCount":
suggest = "dead_letter_message_count"
elif key == "scheduledMessageCount":
suggest = "scheduled_message_count"
elif key == "transferDeadLetterMessageCount":
suggest = "transfer_dead_letter_message_count"
elif key == "transferMessageCount":
suggest = "transfer_message_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MessageCountDetailsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MessageCountDetailsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MessageCountDetailsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
active_message_count: float,
dead_letter_message_count: float,
scheduled_message_count: float,
transfer_dead_letter_message_count: float,
transfer_message_count: float):
"""
Message Count Details.
:param float active_message_count: Number of active messages in the queue, topic, or subscription.
:param float dead_letter_message_count: Number of messages that are dead lettered.
:param float scheduled_message_count: Number of scheduled messages.
:param float transfer_dead_letter_message_count: Number of messages transferred into dead letters.
:param float transfer_message_count: Number of messages transferred to another queue, topic, or subscription.
"""
pulumi.set(__self__, "active_message_count", active_message_count)
pulumi.set(__self__, "dead_letter_message_count", dead_letter_message_count)
pulumi.set(__self__, "scheduled_message_count", scheduled_message_count)
pulumi.set(__self__, "transfer_dead_letter_message_count", transfer_dead_letter_message_count)
pulumi.set(__self__, "transfer_message_count", transfer_message_count)
@property
@pulumi.getter(name="activeMessageCount")
def active_message_count(self) -> float:
"""
Number of active messages in the queue, topic, or subscription.
"""
return pulumi.get(self, "active_message_count")
@property
@pulumi.getter(name="deadLetterMessageCount")
def dead_letter_message_count(self) -> float:
"""
Number of messages that are dead lettered.
"""
return pulumi.get(self, "dead_letter_message_count")
@property
@pulumi.getter(name="scheduledMessageCount")
def scheduled_message_count(self) -> float:
"""
Number of scheduled messages.
"""
return pulumi.get(self, "scheduled_message_count")
@property
@pulumi.getter(name="transferDeadLetterMessageCount")
def transfer_dead_letter_message_count(self) -> float:
"""
Number of messages transferred into dead letters.
"""
return pulumi.get(self, "transfer_dead_letter_message_count")
@property
@pulumi.getter(name="transferMessageCount")
def transfer_message_count(self) -> float:
"""
Number of messages transferred to another queue, topic, or subscription.
"""
return pulumi.get(self, "transfer_message_count")
@pulumi.output_type
class NWRuleSetIpRulesResponse(dict):
"""
Description of NetWorkRuleSet - IpRules resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "ipMask":
suggest = "ip_mask"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NWRuleSetIpRulesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NWRuleSetIpRulesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NWRuleSetIpRulesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
action: Optional[str] = None,
ip_mask: Optional[str] = None):
"""
Description of NetWorkRuleSet - IpRules resource.
:param str action: The IP Filter Action
:param str ip_mask: IP Mask
"""
if action is None:
action = 'Allow'
if action is not None:
pulumi.set(__self__, "action", action)
if ip_mask is not None:
pulumi.set(__self__, "ip_mask", ip_mask)
@property
@pulumi.getter
def action(self) -> Optional[str]:
"""
The IP Filter Action
"""
return pulumi.get(self, "action")
@property
@pulumi.getter(name="ipMask")
def ip_mask(self) -> Optional[str]:
"""
IP Mask
"""
return pulumi.get(self, "ip_mask")
@pulumi.output_type
class NWRuleSetVirtualNetworkRulesResponse(dict):
"""
Description of VirtualNetworkRules - NetworkRules resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "ignoreMissingVnetServiceEndpoint":
suggest = "ignore_missing_vnet_service_endpoint"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NWRuleSetVirtualNetworkRulesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NWRuleSetVirtualNetworkRulesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NWRuleSetVirtualNetworkRulesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ignore_missing_vnet_service_endpoint: Optional[bool] = None,
subnet: Optional['outputs.SubnetResponse'] = None):
"""
Description of VirtualNetworkRules - NetworkRules resource.
:param bool ignore_missing_vnet_service_endpoint: Value that indicates whether to ignore missing VNet Service Endpoint
:param 'SubnetResponse' subnet: Subnet properties
"""
if ignore_missing_vnet_service_endpoint is not None:
pulumi.set(__self__, "ignore_missing_vnet_service_endpoint", ignore_missing_vnet_service_endpoint)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter(name="ignoreMissingVnetServiceEndpoint")
def ignore_missing_vnet_service_endpoint(self) -> Optional[bool]:
"""
Value that indicates whether to ignore missing VNet Service Endpoint
"""
return pulumi.get(self, "ignore_missing_vnet_service_endpoint")
@property
@pulumi.getter
def subnet(self) -> Optional['outputs.SubnetResponse']:
"""
Subnet properties
"""
return pulumi.get(self, "subnet")
@pulumi.output_type
class SBSkuResponse(dict):
"""
SKU of the namespace.
"""
def __init__(__self__, *,
name: str,
capacity: Optional[int] = None,
tier: Optional[str] = None):
"""
SKU of the namespace.
:param str name: Name of this SKU.
        :param int capacity: The specified messaging units for the tier. For the Premium tier, valid capacities are 1, 2, and 4.
:param str tier: The billing tier of this particular SKU.
"""
pulumi.set(__self__, "name", name)
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def name(self) -> str:
"""
Name of this SKU.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def capacity(self) -> Optional[int]:
"""
        The specified messaging units for the tier. For the Premium tier, valid capacities are 1, 2, and 4.
"""
return pulumi.get(self, "capacity")
@property
@pulumi.getter
def tier(self) -> Optional[str]:
"""
The billing tier of this particular SKU.
"""
return pulumi.get(self, "tier")
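# Illustrative sketch (not part of the generated SDK): these output types are
# produced by resource reads, e.g. a namespace lookup returns an SBSkuResponse
# whose fields are read as plain properties:
#   sku = namespace.sku            # -> SBSkuResponse
#   sku.name, sku.capacity, sku.tier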
@pulumi.output_type
class SqlFilterResponse(dict):
"""
Represents a filter which is a composition of an expression and an action that is executed in the pub/sub pipeline.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "compatibilityLevel":
suggest = "compatibility_level"
elif key == "requiresPreprocessing":
suggest = "requires_preprocessing"
elif key == "sqlExpression":
suggest = "sql_expression"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SqlFilterResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SqlFilterResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SqlFilterResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
compatibility_level: Optional[int] = None,
requires_preprocessing: Optional[bool] = None,
sql_expression: Optional[str] = None):
"""
Represents a filter which is a composition of an expression and an action that is executed in the pub/sub pipeline.
:param int compatibility_level: This property is reserved for future use. An integer value showing the compatibility level, currently hard-coded to 20.
:param bool requires_preprocessing: Value that indicates whether the rule action requires preprocessing.
:param str sql_expression: The SQL expression. e.g. MyProperty='ABC'
"""
if compatibility_level is None:
compatibility_level = 20
if compatibility_level is not None:
pulumi.set(__self__, "compatibility_level", compatibility_level)
if requires_preprocessing is None:
requires_preprocessing = True
if requires_preprocessing is not None:
pulumi.set(__self__, "requires_preprocessing", requires_preprocessing)
if sql_expression is not None:
pulumi.set(__self__, "sql_expression", sql_expression)
@property
@pulumi.getter(name="compatibilityLevel")
def compatibility_level(self) -> Optional[int]:
"""
This property is reserved for future use. An integer value showing the compatibility level, currently hard-coded to 20.
"""
return pulumi.get(self, "compatibility_level")
@property
@pulumi.getter(name="requiresPreprocessing")
def requires_preprocessing(self) -> Optional[bool]:
"""
Value that indicates whether the rule action requires preprocessing.
"""
return pulumi.get(self, "requires_preprocessing")
@property
@pulumi.getter(name="sqlExpression")
def sql_expression(self) -> Optional[str]:
"""
The SQL expression. e.g. MyProperty='ABC'
"""
return pulumi.get(self, "sql_expression")
@pulumi.output_type
class SubnetResponse(dict):
"""
Properties supplied for Subnet
"""
def __init__(__self__, *,
id: str):
"""
Properties supplied for Subnet
:param str id: Resource ID of Virtual Network Subnet
"""
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> str:
"""
Resource ID of Virtual Network Subnet
"""
return pulumi.get(self, "id")
| 35.79258 | 159 | 0.635948 |
793ee4ad2cf67cf9f4e70a00b068c1ec6716af87 | 399 | py | Python | spacebarapp/spacebarapp/wsgi.py | aharri64/Spacebar | 2a51d0059ae9853f87720ab9f62a8915daf7617c | [
"Unlicense"
] | null | null | null | spacebarapp/spacebarapp/wsgi.py | aharri64/Spacebar | 2a51d0059ae9853f87720ab9f62a8915daf7617c | [
"Unlicense"
] | null | null | null | spacebarapp/spacebarapp/wsgi.py | aharri64/Spacebar | 2a51d0059ae9853f87720ab9f62a8915daf7617c | [
"Unlicense"
] | null | null | null |
"""
WSGI config for spacebarapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'spacebarapp.settings')
application = get_wsgi_application()
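# Deployment sketch (not part of the original file; assumes gunicorn is
# installed in the same environment):
#   gunicorn spacebarapp.wsgi:application --bind 0.0.0.0:8000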
| 23.470588 | 78 | 0.789474 |
793ee4e3798fd79aa1febceca4baf4f76a82446b | 3,182 | py | Python | KPIAnomaly/model_bagel/testing.py | Nono17/AIOPS-Anomaly-Detection | 1bfc49ccf1c1c852a45a8e4606a9a73550184005 | [
"Apache-2.0"
] | 44 | 2021-05-21T14:30:12.000Z | 2021-12-28T12:52:38.000Z | KPIAnomaly/model_bagel/testing.py | Nono17/AIOPS-Anomaly-Detection | 1bfc49ccf1c1c852a45a8e4606a9a73550184005 | [
"Apache-2.0"
] | 1 | 2021-03-31T16:41:28.000Z | 2021-03-31T16:41:28.000Z | KPIAnomaly/model_bagel/testing.py | Nono17/AIOPS-Anomaly-Detection | 1bfc49ccf1c1c852a45a8e4606a9a73550184005 | [
"Apache-2.0"
] | 15 | 2021-03-29T13:16:56.000Z | 2022-03-13T12:49:44.000Z |
import model_bagel
import numpy as np
from sklearn.metrics import precision_recall_curve
from typing import Sequence, Tuple, Dict, Optional
def _adjust_scores(labels: np.ndarray,
scores: np.ndarray,
delay: Optional[int] = None,
                   inplace: bool = False) -> np.ndarray:
    """Point-adjust the anomaly scores: within each contiguous anomalous
    segment, propagate the highest score seen in the first `delay` + 1 points,
    so the whole segment counts as detected when an alert fires within the
    allowed detection delay."""
    if np.shape(scores) != np.shape(labels):
        raise ValueError('`labels` and `scores` must have the same shape')
if delay is None:
delay = len(scores)
splits = np.where(labels[1:] != labels[:-1])[0] + 1
is_anomaly = labels[0] == 1
adjusted_scores = np.copy(scores) if not inplace else scores
pos = 0
for part in splits:
if is_anomaly:
ptr = min(pos + delay + 1, part)
adjusted_scores[pos: ptr] = np.max(adjusted_scores[pos: ptr])
adjusted_scores[ptr: part] = np.maximum(adjusted_scores[ptr: part], adjusted_scores[pos])
is_anomaly = not is_anomaly
pos = part
part = len(labels)
if is_anomaly:
ptr = min(pos + delay + 1, part)
adjusted_scores[pos: part] = np.max(adjusted_scores[pos: ptr])
return adjusted_scores
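# Worked example (illustrative, not in the original module): with
# labels = [0, 1, 1, 1, 0], scores = [.1, .2, .9, .3, .1] and delay=1, the
# anomalous segment spans indices 1..3, the peak 0.9 lies within its first
# delay + 1 points, so the adjusted scores become [.1, .9, .9, .9, .1] and
# the whole segment is scored as detected.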
def _ignore_missing(series_list: Sequence, missing: np.ndarray) -> Tuple[np.ndarray, ...]:
    """Return copies of each series with the points flagged as missing removed."""
ret = []
for series in series_list:
series = np.copy(series)
ret.append(series[missing != 1])
return tuple(ret)
def _best_f1score(labels: np.ndarray, scores: np.ndarray) -> Tuple[float, float, float, float]:
    """Sweep all thresholds on the precision-recall curve and return the
    threshold, precision, recall and F1-score of the best-F1 operating point."""
precision, recall, thresholds = precision_recall_curve(y_true=labels, probas_pred=scores)
f1score = 2 * precision * recall / np.clip(precision + recall, a_min=1e-8, a_max=None)
best_threshold = thresholds[np.argmax(f1score)]
best_precision = precision[np.argmax(f1score)]
best_recall = recall[np.argmax(f1score)]
return best_threshold, best_precision, best_recall, np.max(f1score)
def get_test_results(labels: np.ndarray,
scores: np.ndarray,
missing: np.ndarray,
window_size: int,
                     delay: Optional[int] = None) -> Dict:
    """Drop the first `window_size` - 1 points (which lack a full window),
    delay-adjust the scores, drop missing points, and report the best-F1
    operating point."""
labels = labels[window_size - 1:]
scores = scores[window_size - 1:]
missing = missing[window_size - 1:]
adjusted_scores = _adjust_scores(labels=labels, scores=scores, delay=delay)
adjusted_labels, adjusted_scores = _ignore_missing([labels, adjusted_scores], missing=missing)
threshold, precision, recall, f1score = _best_f1score(labels=adjusted_labels, scores=adjusted_scores)
return {'threshold': threshold,
'precision': precision,
'recall': recall,
'f1score': f1score}
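# Usage sketch (hypothetical arrays; `labels`, `scores` and `missing` are 1-D
# numpy arrays aligned with the original KPI series):
#   results = get_test_results(labels, scores, missing, window_size=120, delay=7)
#   print(results['f1score'])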
class KPIStats:
def __init__(self, kpi: model_bagel.data.KPI):
self.num_points = len(kpi.values)
self.num_missing = len(kpi.missing[kpi.missing == 1])
self.num_anomaly = len(kpi.labels[kpi.labels == 1])
self.missing_rate = self.num_missing / self.num_points
self.anomaly_rate = self.num_anomaly / self.num_points
def get_kpi_stats(*kpis: model_bagel.data.KPI) -> Tuple[KPIStats, ...]:
ret = []
for kpi in kpis:
ret.append(KPIStats(kpi))
return tuple(ret)
| 37.435294 | 105 | 0.646135 |
793ee611666c8256667511e6be208a87739c2834 | 2,141 | py | Python | octavia/tests/functional/api/v1/test_listener_statistics.py | zjchao/octavia | e07031fa78604568c6e2112cb4cb147661bc57d7 | [
"Apache-2.0"
] | 1 | 2019-01-11T06:18:38.000Z | 2019-01-11T06:18:38.000Z | octavia/tests/functional/api/v1/test_listener_statistics.py | zjchao/octavia | e07031fa78604568c6e2112cb4cb147661bc57d7 | [
"Apache-2.0"
] | 10 | 2020-09-18T12:17:59.000Z | 2022-03-14T15:45:38.000Z | octavia/tests/functional/api/v1/test_listener_statistics.py | zjchao/octavia | e07031fa78604568c6e2112cb4cb147661bc57d7 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z |
# Copyright 2016 Blue Box, an IBM Company
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from octavia.common import constants
from octavia.tests.functional.api.v1 import base
from oslo_utils import uuidutils
class TestListenerStatistics(base.BaseAPITest):
FAKE_UUID_1 = uuidutils.generate_uuid()
def setUp(self):
super(TestListenerStatistics, self).setUp()
self.lb = self.create_load_balancer(
{'subnet_id': uuidutils.generate_uuid()})
self.set_lb_status(self.lb.get('id'))
self.listener = self.create_listener(self.lb.get('id'),
constants.PROTOCOL_HTTP, 80)
self.set_lb_status(self.lb.get('id'))
self.ls_path = self.LISTENER_STATS_PATH.format(
lb_id=self.lb.get('id'), listener_id=self.listener.get('id'))
self.amphora = self.create_amphora(uuidutils.generate_uuid(),
self.lb.get('id'))
def test_get(self):
ls = self.create_listener_stats(listener_id=self.listener.get('id'),
amphora_id=self.amphora.id)
expected = {
'listener': {
'bytes_in': ls['bytes_in'],
'bytes_out': ls['bytes_out'],
'active_connections': ls['active_connections'],
'total_connections': ls['total_connections'],
'request_errors': ls['request_errors']
}
}
response = self.get(self.ls_path)
response_body = response.json
self.assertEqual(expected, response_body)
| 41.173077 | 78 | 0.625876 |
793ee75cfba2cd24f2413dbb6e8cd1331928534f | 1,186 | gyp | Python | erizoAPI/binding.gyp | winlinvip/licode | ee4f012147264c29cc6d8282f2a573b801a2454b | [
"MIT"
] | 1 | 2018-08-21T03:59:44.000Z | 2018-08-21T03:59:44.000Z | erizoAPI/binding.gyp | winlinvip/licode | ee4f012147264c29cc6d8282f2a573b801a2454b | [
"MIT"
] | null | null | null | erizoAPI/binding.gyp | winlinvip/licode | ee4f012147264c29cc6d8282f2a573b801a2454b | [
"MIT"
] | 1 | 2018-08-21T03:59:47.000Z | 2018-08-21T03:59:47.000Z |
{
'targets': [
{
'target_name': 'addon',
'sources': [ 'addon.cc', 'IOThreadPool.cc', 'ThreadPool.cc', 'WebRtcConnection.cc', 'OneToManyProcessor.cc', 'ExternalInput.cc', 'ExternalOutput.cc', 'SyntheticInput.cc'],
'include_dirs' : ["<!(node -e \"require('nan')\")", '$(ERIZO_HOME)/src/erizo', '$(ERIZO_HOME)/../build/libdeps/build/include', '$(ERIZO_HOME)/src/third_party/webrtc/src'],
'libraries': ['-L$(ERIZO_HOME)/build/erizo', '-lerizo'],
'conditions': [
[ 'OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES', # -fno-exceptions
'GCC_ENABLE_CPP_RTTI': 'YES', # -fno-rtti
            'MACOSX_DEPLOYMENT_TARGET' : '10.11', # minimum macOS version (raised from 10.7)
'OTHER_CFLAGS': [
'-g -O3 -stdlib=libc++ -std=c++11',
]
},
}, { # OS!="mac"
'cflags!' : ['-fno-exceptions'],
'cflags' : ['-D__STDC_CONSTANT_MACROS'],
'cflags_cc' : ['-Wall', '-O3', '-g' , '-std=c++11', '-fexceptions'],
          # merged into one entry: a repeated 'cflags_cc!' key would silently override the first
          'cflags_cc!' : ['-fno-exceptions', '-fno-rtti']
}],
]
}
]
}
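# Build sketch (not part of the original file; assumes node-gyp is installed
# and the erizo C++ library has already been built under $ERIZO_HOME/build/erizo):
#   ERIZO_HOME=/path/to/licode/erizo node-gyp rebuild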
| 40.896552 | 177 | 0.49747 |
793ee947a24618a93897a58f0933e8b67896b637 | 10,436 | py | Python | inc/python_image_utilities/test/image_util_test.py | wwarriner/unsupervised_onh_histo | 48ec23781af203a14ff590f3074a2d3559957560 | [
"MIT"
] | null | null | null | inc/python_image_utilities/test/image_util_test.py | wwarriner/unsupervised_onh_histo | 48ec23781af203a14ff590f3074a2d3559957560 | [
"MIT"
] | 9 | 2020-05-01T16:45:41.000Z | 2022-02-08T19:11:07.000Z | inc/python_image_utilities/test/image_util_test.py | wwarriner/unsupervised_onh_histo | 48ec23781af203a14ff590f3074a2d3559957560 | [
"MIT"
] | null | null | null |
import unittest
from pathlib import PurePath, Path
from math import ceil, floor
import cv2
import numpy as np
from image_util import *
class Test(unittest.TestCase):
def setUp(self):
self.side_len = np.iinfo(np.uint8).max
self.base_shape = (self.side_len, self.side_len)
self.rgb = np.moveaxis(np.indices(self.base_shape), 0, -1).astype(np.uint8)
self.rgb = np.concatenate(
(self.rgb, np.zeros(self.base_shape + (1,)).astype(np.uint8)), axis=2
)
self.rgb_shape = self.rgb.shape
self.fov_radius_ratio = 0.45
self.fov_offset = (-35, 35)
self.fov_radius = floor(self.side_len * self.fov_radius_ratio)
self.mask = generate_circular_fov_mask(
self.base_shape, self.fov_radius, self.fov_offset
)
self.mask_shape = self.mask.shape
self.wait_time = 500
# ! set to be relatively prime to side_len
# ! different to check correct reshaping
self.patch_shape = (12, 13)
self.offsets = [(0, 0), (1, 0), (0, 1), (1, 1), self.patch_shape]
self.base_path = PurePath("test")
self.test_image_path = self.base_path / "test.jpg"
self.tulips_image_path = self.base_path / "tulips.png"
def tearDown(self):
pass
def read_test_image(self):
return load(str(self.test_image_path))
def read_tulips_image(self):
return load(str(self.tulips_image_path))
def show(self, image, tag):
show(image, tag)
cv2.moveWindow(tag, 100, 100)
cv2.waitKey(self.wait_time)
cv2.destroyWindow(tag)
def reduce_contrast(self, image):
factor = 3.0
minimum = 50
        return (np.round(image / factor) + minimum).astype(np.uint8)
def generate_image(self):
image = generate_noise(self.base_shape)
return self.reduce_contrast(image)
def read_gray_image(self):
image = cv2.imread(str(self.test_image_path))
image = rgb2gray(image)
return self.reduce_contrast(image)
def run_fn(self, image, fn, *args, **kwargs):
out = fn(image, *args, **kwargs)
vis = np.concatenate((image, out), axis=0)
tag = "test: {}".format(fn.__name__)
self.show(vis, tag)
def standardize(self, image):
standardized = standardize(image)
return self.rescale(standardized)
def rescale(self, image):
return rescale(image, out_range=(0, 255)).astype(np.uint8)
def test_adjust_gamma(self):
self.run_fn(self.read_gray_image(), adjust_gamma, 2.0)
self.run_fn(self.generate_image(), adjust_gamma, 2.0)
# TODO add structured assertions here
def test_apply_clahe(self):
self.run_fn(self.read_gray_image(), clahe)
self.run_fn(self.generate_image(), clahe)
# TODO add structured assertions here
def test_consensus(self):
# TWO_CLASS
A = np.array([[1, 1], [1, 1]])
B = np.array([[0, 1], [1, 1]])
C = np.array([[0, 0], [1, 1]])
D = np.array([[0, 0], [0, 1]])
data = np.stack([A, B, C, D])
RESULT_MIN = np.array([[0, 0], [1, 1]])[np.newaxis, ...]
con = consensus(data, threshold="majority")
self.assertTrue((con == RESULT_MIN).all())
RESULT_ZERO = np.array([[1, 1], [1, 1]])
con = consensus(data, threshold=0)
self.assertTrue((con == RESULT_ZERO).all())
con = consensus(data, threshold=0.0)
self.assertTrue((con == RESULT_ZERO).all())
RESULT_ONE = RESULT_ZERO
con = consensus(data, threshold=1)
self.assertTrue((con == RESULT_ONE).all())
con = consensus(data, threshold=0.25)
self.assertTrue((con == RESULT_ONE).all())
RESULT_TWO = np.array([[0, 1], [1, 1]])
con = consensus(data, threshold=2)
self.assertTrue((con == RESULT_TWO).all())
con = consensus(data, threshold=0.5)
self.assertTrue((con == RESULT_TWO).all())
RESULT_THREE = RESULT_MIN
con = consensus(data, threshold=3)
self.assertTrue((con == RESULT_THREE).all())
con = consensus(data, threshold=0.75)
self.assertTrue((con == RESULT_THREE).all())
RESULT_FOUR = np.array([[0, 0], [0, 1]])
con = consensus(data, threshold=4)
self.assertTrue((con == RESULT_FOUR).all())
con = consensus(data, threshold=1.0)
self.assertTrue((con == RESULT_FOUR).all())
RESULT_FIVE = np.array([[0, 0], [0, 0]])
con = consensus(data, threshold=5)
self.assertTrue((con == RESULT_FIVE).all())
# MULTI_CLASS
A = np.array([[1, 2], [2, 2]])
B = np.array([[0, 1], [2, 2]])
C = np.array([[0, 1], [1, 2]])
D = np.array([[0, 0], [1, 1]])
data = np.stack([A, B, C, D])
RESULT_MIN = np.array([[0, 1], [1, 2]])
con = consensus(data, threshold="majority")
self.assertTrue((con == RESULT_MIN).all())
        self.assertRaises(AssertionError, consensus, data, threshold=1)
def test_load_images(self):
images, names = load_images(self.base_path)
self.assertEqual(len(images), 2)
self.assertEqual(len(names), 2)
self.assertEqual(names[0], self.test_image_path)
self.assertEqual(names[1], self.tulips_image_path)
def test_montage(self):
patches, _, _ = patchify(self.rgb, self.patch_shape)
count = patches.shape[0]
montage_len = floor(count ** 0.5)
montage_shape = (montage_len, montage_len)
# sequential order (baseline)
m = montage(patches, montage_shape)
self.show(m, "test: sequential")
# random order
m = montage(patches, montage_shape, mode="random")
self.show(m, "test: random")
# non-zero start
start = 5 * count // 13
m = montage(patches, montage_shape, mode="random", start=start)
self.show(m, "test: start={}".format(start))
# with repeats
m = montage(patches, montage_shape, mode="random", repeat=True, start=start)
self.show(m, "test: with repeats")
# auto shape
m = montage(patches, mode="random", repeat=True, start=start)
self.show(m, "test: with auto-shape")
        # defined aspect ratio, with repeats
        m = montage(patches, 2.0, mode="random", repeat=True, start=start)
        self.show(m, "test: aspect ratio 2.0 with repeats")
        # defined aspect ratio
        m = montage(patches, 2.0, mode="random", start=start)
        self.show(m, "test: aspect ratio 2.0")
def test_overlay(self):
image = self.read_test_image()
noise = generate_noise(image.shape)[..., np.newaxis]
color = [0.5, 1.0, 0.2]
self.show(overlay(image, noise, color, alpha=0.2, beta=0.8), "test: overlay")
def test_patchify(self):
for offset in self.offsets:
reqd_pre_padding = (
np.array(self.patch_shape) - np.array(offset)
) % np.array(self.patch_shape)
reqd_post_padding = self.patch_shape - np.remainder(
np.array(self.rgb.shape[:-1]) + reqd_pre_padding,
np.array(self.patch_shape),
)
reqd_padding = list(zip(reqd_pre_padding, reqd_post_padding))
padded_shape = self.rgb.shape[:-1] + reqd_pre_padding + reqd_post_padding
counts = np.array(
[ceil(x / y) for x, y in zip(padded_shape, self.patch_shape)]
)
count = counts.prod()
patches, patch_count, padding = patchify(
self.rgb, self.patch_shape, offset=offset
)
self.assertEqual(patches.ndim, self.rgb.ndim + 1)
self.assertEqual(patches.shape[0], count)
self.assertEqual(patches.shape[1:3], self.patch_shape)
self.assertEqual(patches.shape[3], self.rgb.shape[2])
self.assertEqual(len(patch_count), 2)
self.assertTrue((patch_count == counts.ravel()).all())
self.assertEqual(len(padding), 2)
all_padding = np.array([list(p) for p in padding])
all_reqd_padding = np.array([list(p) for p in reqd_padding])
self.assertTrue((all_padding == all_reqd_padding).all())
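        # Worked example of the padding arithmetic above (offset (0, 0),
        # patch (12, 13), image 255x255): pre-padding is (0, 0), post-padding
        # is (9, 5), the padded shape is 264x260, and 22 * 20 = 440 patches
        # are produced.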
def test_rescale(self):
self.run_fn(self.read_gray_image(), self.rescale)
self.run_fn(self.generate_image(), self.rescale)
# TODO add structured assertions here
def test_save_load(self):
try:
path = PurePath("image_util_test_output.png")
save(str(path), self.rgb.astype(np.uint8))
self.show(load(str(path)), "test: save/load")
cv2.waitKey(self.wait_time)
finally:
if Path(path).is_file():
Path(path).unlink()
def test_show(self):
self.show(self.rgb.astype(np.uint8), "test: visualize_rgb (blue and green?)")
self.show(self.rgb[..., 0], "test: visualize_gray (is gradient?)")
self.show(
(self.mask * 255).astype(np.uint8), "test: visualize_gray (is circle?)"
)
self.show(rgb2gray(self.read_tulips_image()), "test: visualize_gray (is gray?)")
self.show(self.read_tulips_image(), "test: visualize_color (is color?)")
def test_stack(self):
n = 3
s = stack(n * (self.rgb,))
self.assertEqual(s.shape[0], n)
self.assertEqual(s.shape[1:], self.rgb.shape[0:])
self.assertIsInstance(s, np.ndarray)
def test_standardize(self):
self.run_fn(self.read_gray_image(), self.standardize)
self.run_fn(self.generate_image(), self.standardize)
# TODO add structured assertions here
def test_unpatchify(self):
input_images = np.stack((self.rgb, self.rgb))
for offset in self.offsets:
patches, patch_count, padding = patchify(
input_images, self.patch_shape, offset=offset
)
images = unpatchify(patches, patch_count, padding)
self.assertEqual(images.ndim, self.rgb.ndim + 1)
self.assertEqual(images.shape, input_images.shape)
self.assertTrue((input_images == images).all())
# TODO test_load_folder
# TODO test_save_images
# TODO test_mask_images
# TODO test_get_center
# TODO test_generate_circular_fov_mask
if __name__ == "__main__":
unittest.main()
| 37.007092 | 88 | 0.596685 |
793ee978e94e3cbf79d589a00fa3efce64b9696b | 80,625 | py | Python | python/ccxt/bitrue.py | DoctorSlimm/ccxt | 8f19512dfc5dac159eaeb465c98226c00252a9b6 | [
"MIT"
] | 1 | 2021-11-16T15:45:34.000Z | 2021-11-16T15:45:34.000Z | python/ccxt/bitrue.py | DoctorSlimm/ccxt | 8f19512dfc5dac159eaeb465c98226c00252a9b6 | [
"MIT"
] | null | null | null | python/ccxt/bitrue.py | DoctorSlimm/ccxt | 8f19512dfc5dac159eaeb465c98226c00252a9b6 | [
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import OrderImmediatelyFillable
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
from ccxt.base.errors import InvalidNonce
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.precise import Precise
class bitrue(Exchange):
def describe(self):
return self.deep_extend(super(bitrue, self).describe(), {
'id': 'bitrue',
'name': 'Bitrue',
'countries': ['SG'], # Singapore, Malta
'rateLimit': 1000,
'certified': False,
'version': 'v1',
# new metainfo interface
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': None, # has but unimplemented
'future': None,
'option': False,
'cancelAllOrders': False,
'cancelOrder': True,
'createOrder': True,
'createStopLimitOrder': True,
'createStopMarketOrder': True,
'createStopOrder': True,
'fetchBalance': True,
'fetchBidsAsks': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchDepositAddress': False,
'fetchDeposits': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': 'emulated',
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': False,
'fetchStatus': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTradingFee': False,
'fetchTradingFees': False,
'fetchTransactionFees': False,
'fetchTransactions': False,
'fetchTransfers': False,
'fetchWithdrawals': True,
'transfer': False,
'withdraw': True,
},
'timeframes': {
'1m': '1m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'1d': '1d',
'1w': '1w',
'1M': '1M',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/139516488-243a830d-05dd-446b-91c6-c1f18fe30c63.jpg',
'api': {
'v1': 'https://www.bitrue.com/api/v1',
'v2': 'https://www.bitrue.com/api/v2',
'kline': 'https://www.bitrue.com/kline-api',
},
'www': 'https://www.bitrue.com',
'referral': 'https://www.bitrue.com/activity/task/task-landing?inviteCode=EZWETQE&cn=900000',
'doc': [
'https://github.com/Bitrue-exchange/bitrue-official-api-docs',
],
'fees': 'https://bitrue.zendesk.com/hc/en-001/articles/4405479952537',
},
'api': {
'kline': {
'public': {
'get': {
'public.json': 1,
'public{currency}.json': 1,
},
},
},
'v1': {
'public': {
'get': {
'ping': 1,
'time': 1,
'exchangeInfo': 1,
'depth': {'cost': 1, 'byLimit': [[100, 1], [500, 5], [1000, 10]]},
'trades': 1,
'historicalTrades': 5,
'aggTrades': 1,
'ticker/24hr': {'cost': 1, 'noSymbol': 40},
'ticker/price': {'cost': 1, 'noSymbol': 2},
'ticker/bookTicker': {'cost': 1, 'noSymbol': 2},
},
},
'private': {
'get': {
'order': 1,
'openOrders': 1,
'allOrders': 5,
'account': 5,
'myTrades': {'cost': 5, 'noSymbol': 40},
'etf/net-value/{symbol}': 1,
'withdraw/history': 1,
'deposit/history': 1,
},
'post': {
'order': 4,
'withdraw/commit': 1,
},
'delete': {
'order': 1,
},
},
},
'v2': {
'private': {
'get': {
'myTrades': 5,
},
},
},
},
'fees': {
'trading': {
'feeSide': 'get',
'tierBased': False,
'percentage': True,
'taker': self.parse_number('0.00098'),
'maker': self.parse_number('0.00098'),
},
'future': {
'trading': {
'feeSide': 'quote',
'tierBased': True,
'percentage': True,
'taker': self.parse_number('0.000400'),
'maker': self.parse_number('0.000200'),
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.000400')],
[self.parse_number('250'), self.parse_number('0.000400')],
[self.parse_number('2500'), self.parse_number('0.000350')],
[self.parse_number('7500'), self.parse_number('0.000320')],
[self.parse_number('22500'), self.parse_number('0.000300')],
[self.parse_number('50000'), self.parse_number('0.000270')],
[self.parse_number('100000'), self.parse_number('0.000250')],
[self.parse_number('200000'), self.parse_number('0.000220')],
[self.parse_number('400000'), self.parse_number('0.000200')],
[self.parse_number('750000'), self.parse_number('0.000170')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.000200')],
[self.parse_number('250'), self.parse_number('0.000160')],
[self.parse_number('2500'), self.parse_number('0.000140')],
[self.parse_number('7500'), self.parse_number('0.000120')],
[self.parse_number('22500'), self.parse_number('0.000100')],
[self.parse_number('50000'), self.parse_number('0.000080')],
[self.parse_number('100000'), self.parse_number('0.000060')],
[self.parse_number('200000'), self.parse_number('0.000040')],
[self.parse_number('400000'), self.parse_number('0.000020')],
[self.parse_number('750000'), self.parse_number('0')],
],
},
},
},
'delivery': {
'trading': {
'feeSide': 'base',
'tierBased': True,
'percentage': True,
'taker': self.parse_number('0.000500'),
'maker': self.parse_number('0.000100'),
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.000500')],
[self.parse_number('250'), self.parse_number('0.000450')],
[self.parse_number('2500'), self.parse_number('0.000400')],
[self.parse_number('7500'), self.parse_number('0.000300')],
[self.parse_number('22500'), self.parse_number('0.000250')],
[self.parse_number('50000'), self.parse_number('0.000240')],
[self.parse_number('100000'), self.parse_number('0.000240')],
[self.parse_number('200000'), self.parse_number('0.000240')],
[self.parse_number('400000'), self.parse_number('0.000240')],
[self.parse_number('750000'), self.parse_number('0.000240')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.000100')],
[self.parse_number('250'), self.parse_number('0.000080')],
[self.parse_number('2500'), self.parse_number('0.000050')],
[self.parse_number('7500'), self.parse_number('0.0000030')],
[self.parse_number('22500'), self.parse_number('0')],
[self.parse_number('50000'), self.parse_number('-0.000050')],
[self.parse_number('100000'), self.parse_number('-0.000060')],
[self.parse_number('200000'), self.parse_number('-0.000070')],
[self.parse_number('400000'), self.parse_number('-0.000080')],
[self.parse_number('750000'), self.parse_number('-0.000090')],
],
},
},
},
},
# exchange-specific options
'options': {
# 'fetchTradesMethod': 'publicGetAggTrades', # publicGetTrades, publicGetHistoricalTrades
'fetchMyTradesMethod': 'v2PrivateGetMyTrades', # v1PrivateGetMyTrades
'hasAlreadyAuthenticatedSuccessfully': False,
'recvWindow': 5 * 1000, # 5 sec, binance default
'timeDifference': 0, # the difference between system clock and Binance clock
'adjustForTimeDifference': False, # controls the adjustment logic upon instantiation
'parseOrderToPrecision': False, # force amounts and costs in parseOrder to precision
'newOrderRespType': {
'market': 'FULL', # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills
'limit': 'FULL', # we change it from 'ACK' by default to 'FULL'(returns immediately if limit is not hit)
},
'networks': {
'SPL': 'SOLANA',
'SOL': 'SOLANA',
'DOGE': 'dogecoin',
'ADA': 'Cardano',
},
},
'commonCurrencies': {
'MIM': 'MIM Swarm',
},
# https://binance-docs.github.io/apidocs/spot/en/#error-codes-2
'exceptions': {
'exact': {
'System is under maintenance.': OnMaintenance, # {"code":1,"msg":"System is under maintenance."}
'System abnormality': ExchangeError, # {"code":-1000,"msg":"System abnormality"}
'You are not authorized to execute self request.': PermissionDenied, # {"msg":"You are not authorized to execute self request."}
'API key does not exist': AuthenticationError,
'Order would trigger immediately.': OrderImmediatelyFillable,
'Stop price would trigger immediately.': OrderImmediatelyFillable, # {"code":-2010,"msg":"Stop price would trigger immediately."}
'Order would immediately match and take.': OrderImmediatelyFillable, # {"code":-2010,"msg":"Order would immediately match and take."}
'Account has insufficient balance for requested action.': InsufficientFunds,
'Rest API trading is not enabled.': ExchangeNotAvailable,
"You don't have permission.": PermissionDenied, # {"msg":"You don't have permission.","success":false}
'Market is closed.': ExchangeNotAvailable, # {"code":-1013,"msg":"Market is closed."}
'Too many requests. Please try again later.': DDoSProtection, # {"msg":"Too many requests. Please try again later.","success":false}
'-1000': ExchangeNotAvailable, # {"code":-1000,"msg":"An unknown error occured while processing the request."}
'-1001': ExchangeNotAvailable, # 'Internal error; unable to process your request. Please try again.'
'-1002': AuthenticationError, # 'You are not authorized to execute self request.'
'-1003': RateLimitExceeded, # {"code":-1003,"msg":"Too much request weight used, current limit is 1200 request weight per 1 MINUTE. Please use the websocket for live updates to avoid polling the API."}
'-1013': InvalidOrder, # createOrder -> 'invalid quantity'/'invalid price'/MIN_NOTIONAL
'-1015': RateLimitExceeded, # 'Too many new orders; current limit is %s orders per %s.'
'-1016': ExchangeNotAvailable, # 'This service is no longer available.',
'-1020': BadRequest, # 'This operation is not supported.'
'-1021': InvalidNonce, # 'your time is ahead of server'
'-1022': AuthenticationError, # {"code":-1022,"msg":"Signature for self request is not valid."}
'-1100': BadRequest, # createOrder(symbol, 1, asdf) -> 'Illegal characters found in parameter 'price'
'-1101': BadRequest, # Too many parameters; expected %s and received %s.
'-1102': BadRequest, # Param %s or %s must be sent, but both were empty
'-1103': BadRequest, # An unknown parameter was sent.
'-1104': BadRequest, # Not all sent parameters were read, read 8 parameters but was sent 9
'-1105': BadRequest, # Parameter %s was empty.
'-1106': BadRequest, # Parameter %s sent when not required.
'-1111': BadRequest, # Precision is over the maximum defined for self asset.
'-1112': InvalidOrder, # No orders on book for symbol.
'-1114': BadRequest, # TimeInForce parameter sent when not required.
'-1115': BadRequest, # Invalid timeInForce.
'-1116': BadRequest, # Invalid orderType.
'-1117': BadRequest, # Invalid side.
'-1118': BadRequest, # New client order ID was empty.
'-1119': BadRequest, # Original client order ID was empty.
'-1120': BadRequest, # Invalid interval.
'-1121': BadSymbol, # Invalid symbol.
'-1125': AuthenticationError, # This listenKey does not exist.
'-1127': BadRequest, # More than %s hours between startTime and endTime.
'-1128': BadRequest, # {"code":-1128,"msg":"Combination of optional parameters invalid."}
'-1130': BadRequest, # Data sent for paramter %s is not valid.
'-1131': BadRequest, # recvWindow must be less than 60000
'-2008': AuthenticationError, # {"code":-2008,"msg":"Invalid Api-Key ID."}
'-2010': ExchangeError, # generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc...
'-2011': OrderNotFound, # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER'
'-2013': OrderNotFound, # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist'
'-2014': AuthenticationError, # {"code":-2014, "msg": "API-key format invalid."}
'-2015': AuthenticationError, # "Invalid API-key, IP, or permissions for action."
'-2019': InsufficientFunds, # {"code":-2019,"msg":"Margin is insufficient."}
'-3005': InsufficientFunds, # {"code":-3005,"msg":"Transferring out not allowed. Transfer out amount exceeds max amount."}
'-3006': InsufficientFunds, # {"code":-3006,"msg":"Your borrow amount has exceed maximum borrow amount."}
'-3008': InsufficientFunds, # {"code":-3008,"msg":"Borrow not allowed. Your borrow amount has exceed maximum borrow amount."}
'-3010': ExchangeError, # {"code":-3010,"msg":"Repay not allowed. Repay amount exceeds borrow amount."}
'-3015': ExchangeError, # {"code":-3015,"msg":"Repay amount exceeds borrow amount."}
'-3022': AccountSuspended, # You account's trading is banned.
'-4028': BadRequest, # {"code":-4028,"msg":"Leverage 100 is not valid"}
'-3020': InsufficientFunds, # {"code":-3020,"msg":"Transfer out amount exceeds max amount."}
'-3041': InsufficientFunds, # {"code":-3041,"msg":"Balance is not enough"}
'-5013': InsufficientFunds, # Asset transfer failed: insufficient balance"
'-11008': InsufficientFunds, # {"code":-11008,"msg":"Exceeding the account's maximum borrowable limit."}
'-4051': InsufficientFunds, # {"code":-4051,"msg":"Isolated balance insufficient."}
},
'broad': {
'has no operation privilege': PermissionDenied,
'MAX_POSITION': InvalidOrder, # {"code":-2010,"msg":"Filter failure: MAX_POSITION"}
},
},
})
def cost_to_precision(self, symbol, cost):
return self.decimal_to_precision(cost, TRUNCATE, self.markets[symbol]['precision']['quote'], self.precisionMode, self.paddingMode)
def currency_to_precision(self, code, fee, networkCode=None):
        # precision info is available in currencies only if the user has configured their API keys
if self.safe_value(self.currencies[code], 'precision') is not None:
return self.decimal_to_precision(fee, TRUNCATE, self.currencies[code]['precision'], self.precisionMode, self.paddingMode)
else:
return self.number_to_string(fee)
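    # Illustrative sketch (not from the original file; assumes a decimal-places
    # precision of 8 has been loaded for the currency):
    #
    #     exchange.currency_to_precision('BTC', '0.123456789')  # -> '0.12345678' (truncated)
    #     # without loaded precision info, the fee is passed through number_to_string() unchanged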
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
def fetch_status(self, params={}):
"""
the latest known information on the availability of the exchange API
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: a `status structure <https://docs.ccxt.com/en/latest/manual.html#exchange-status-structure>`
"""
response = self.v1PublicGetPing(params)
#
# empty means working status.
#
# {}
#
keys = list(response.keys())
keysLength = len(keys)
formattedStatus = 'maintenance' if keysLength else 'ok'
return {
'status': formattedStatus,
'updated': None,
'eta': None,
'url': None,
'info': response,
}
def fetch_time(self, params={}):
"""
fetches the current integer timestamp in milliseconds from the exchange server
:param dict params: extra parameters specific to the bitrue api endpoint
:returns int: the current integer timestamp in milliseconds from the exchange server
"""
response = self.v1PublicGetTime(params)
#
# {
# "serverTime":1635467280514
# }
#
return self.safe_integer(response, 'serverTime')
def safe_network(self, networkId):
uppercaseNetworkId = networkId.upper()
networksById = {
'Aeternity': 'Aeternity',
'AION': 'AION',
'Algorand': 'Algorand',
'ASK': 'ASK',
'ATOM': 'ATOM',
'AVAX C-Chain': 'AVAX C-Chain',
'bch': 'bch',
'BCH': 'BCH',
'BEP2': 'BEP2',
'BEP20': 'BEP20',
'Bitcoin': 'Bitcoin',
'BRP20': 'BRP20',
'Cardano': 'ADA',
'CasinoCoin': 'CasinoCoin',
'CasinoCoin XRPL': 'CasinoCoin XRPL',
'Contentos': 'Contentos',
'Dash': 'Dash',
'Decoin': 'Decoin',
'DeFiChain': 'DeFiChain',
'DGB': 'DGB',
'Divi': 'Divi',
'dogecoin': 'DOGE',
'EOS': 'EOS',
'ERC20': 'ERC20',
'ETC': 'ETC',
'Filecoin': 'Filecoin',
'FREETON': 'FREETON',
'HBAR': 'HBAR',
'Hedera Hashgraph': 'Hedera Hashgraph',
'HRC20': 'HRC20',
'ICON': 'ICON',
'ICP': 'ICP',
'Ignis': 'Ignis',
'Internet Computer': 'Internet Computer',
'IOTA': 'IOTA',
'KAVA': 'KAVA',
'KSM': 'KSM',
'LiteCoin': 'LiteCoin',
'Luna': 'Luna',
'MATIC': 'MATIC',
'Mobile Coin': 'Mobile Coin',
'MonaCoin': 'MonaCoin',
'Monero': 'Monero',
'NEM': 'NEM',
'NEP5': 'NEP5',
'OMNI': 'OMNI',
'PAC': 'PAC',
'Polkadot': 'Polkadot',
'Ravencoin': 'Ravencoin',
'Safex': 'Safex',
'SOLANA': 'SOL',
'Songbird': 'Songbird',
'Stellar Lumens': 'Stellar Lumens',
'Symbol': 'Symbol',
'Tezos': 'XTZ',
'theta': 'theta',
'THETA': 'THETA',
'TRC20': 'TRC20',
'VeChain': 'VeChain',
'VECHAIN': 'VECHAIN',
'Wanchain': 'Wanchain',
'XinFin Network': 'XinFin Network',
'XRP': 'XRP',
'XRPL': 'XRPL',
'ZIL': 'ZIL',
}
return self.safe_string_2(networksById, networkId, uppercaseNetworkId, networkId)
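    # Worked examples for the lookup above (not in the original file, derived from the map):
    #
    #     self.safe_network('Cardano')   # -> 'ADA' (direct hit)
    #     self.safe_network('dogecoin')  # -> 'DOGE' (direct hit wins before uppercasing)
    #     self.safe_network('NewChain')  # -> 'NewChain' (no match, the raw id is returned)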
def fetch_currencies(self, params={}):
"""
fetches all available currencies on an exchange
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: an associative dictionary of currencies
"""
response = self.v1PublicGetExchangeInfo(params)
#
# {
# "timezone":"CTT",
# "serverTime":1635464889117,
# "rateLimits":[
# {"rateLimitType":"REQUESTS_WEIGHT","interval":"MINUTES","limit":6000},
# {"rateLimitType":"ORDERS","interval":"SECONDS","limit":150},
# {"rateLimitType":"ORDERS","interval":"DAYS","limit":288000},
# ],
# "exchangeFilters":[],
# "symbols":[
# {
# "symbol":"SHABTC",
# "status":"TRADING",
# "baseAsset":"sha",
# "baseAssetPrecision":0,
# "quoteAsset":"btc",
# "quotePrecision":10,
# "orderTypes":["MARKET","LIMIT"],
# "icebergAllowed":false,
# "filters":[
# {"filterType":"PRICE_FILTER","minPrice":"0.00000001349","maxPrice":"0.00000017537","priceScale":10},
# {"filterType":"LOT_SIZE","minQty":"1.0","minVal":"0.00020","maxQty":"1000000000","volumeScale":0},
# ],
# "defaultPrice":"0.0000006100",
# },
# ],
# "coins":[
# {
# "coin":"sbr",
# "coinFulName":"Saber",
# "enableWithdraw":true,
# "enableDeposit":true,
# "chains":["SOLANA"],
# "withdrawFee":"2.0",
# "minWithdraw":"5.0",
# "maxWithdraw":"1000000000000000",
# },
# ],
# }
#
result = {}
coins = self.safe_value(response, 'coins', [])
for i in range(0, len(coins)):
currency = coins[i]
id = self.safe_string(currency, 'coin')
name = self.safe_string(currency, 'coinFulName')
code = self.safe_currency_code(id)
enableDeposit = self.safe_value(currency, 'enableDeposit')
enableWithdraw = self.safe_value(currency, 'enableWithdraw')
precision = None
networkIds = self.safe_value(currency, 'chains', [])
networks = {}
for j in range(0, len(networkIds)):
networkId = networkIds[j]
network = self.safe_network(networkId)
networks[network] = {
'info': networkId,
'id': networkId,
'network': network,
'active': None,
'fee': None,
'precision': None,
'limits': {
'withdraw': {
'min': None,
'max': None,
},
},
}
active = (enableWithdraw and enableDeposit)
result[code] = {
'id': id,
'name': name,
'code': code,
'precision': precision,
'info': currency,
'active': active,
'deposit': enableDeposit,
'withdraw': enableWithdraw,
'networks': networks,
'fee': self.safe_number(currency, 'withdrawFee'),
# 'fees': fees,
'limits': {
'withdraw': {
'min': self.safe_number(currency, 'minWithdraw'),
'max': self.safe_number(currency, 'maxWithdraw'),
},
},
}
return result
def fetch_markets(self, params={}):
"""
retrieves data on all markets for bitrue
:param dict params: extra parameters specific to the exchange api endpoint
:returns [dict]: an array of objects representing market data
"""
response = self.v1PublicGetExchangeInfo(params)
#
# {
# "timezone":"CTT",
# "serverTime":1635464889117,
# "rateLimits":[
# {"rateLimitType":"REQUESTS_WEIGHT","interval":"MINUTES","limit":6000},
# {"rateLimitType":"ORDERS","interval":"SECONDS","limit":150},
# {"rateLimitType":"ORDERS","interval":"DAYS","limit":288000},
# ],
# "exchangeFilters":[],
# "symbols":[
# {
# "symbol":"SHABTC",
# "status":"TRADING",
# "baseAsset":"sha",
# "baseAssetPrecision":0,
# "quoteAsset":"btc",
# "quotePrecision":10,
# "orderTypes":["MARKET","LIMIT"],
# "icebergAllowed":false,
# "filters":[
# {"filterType":"PRICE_FILTER","minPrice":"0.00000001349","maxPrice":"0.00000017537","priceScale":10},
# {"filterType":"LOT_SIZE","minQty":"1.0","minVal":"0.00020","maxQty":"1000000000","volumeScale":0},
# ],
# "defaultPrice":"0.0000006100",
# },
# ],
# "coins":[
# {
# "coin":"sbr",
# "coinFulName":"Saber",
# "enableWithdraw":true,
# "enableDeposit":true,
# "chains":["SOLANA"],
# "withdrawFee":"2.0",
# "minWithdraw":"5.0",
# "maxWithdraw":"1000000000000000",
# },
# ],
# }
#
if self.options['adjustForTimeDifference']:
self.load_time_difference()
markets = self.safe_value(response, 'symbols', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'symbol')
lowercaseId = self.safe_string_lower(market, 'symbol')
baseId = self.safe_string(market, 'baseAsset')
quoteId = self.safe_string(market, 'quoteAsset')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
filters = self.safe_value(market, 'filters', [])
filtersByType = self.index_by(filters, 'filterType')
status = self.safe_string(market, 'status')
priceDefault = self.safe_integer(market, 'pricePrecision')
amountDefault = self.safe_integer(market, 'quantityPrecision')
priceFilter = self.safe_value(filtersByType, 'PRICE_FILTER', {})
amountFilter = self.safe_value(filtersByType, 'LOT_SIZE', {})
entry = {
'id': id,
'lowercaseId': lowercaseId,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': (status == 'TRADING'),
'contract': False,
'linear': None,
'inverse': None,
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.safe_integer(amountFilter, 'volumeScale', amountDefault),
'price': self.safe_integer(priceFilter, 'priceScale', priceDefault),
'base': self.safe_integer(market, 'baseAssetPrecision'),
'quote': self.safe_integer(market, 'quotePrecision'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.safe_number(amountFilter, 'minQty'),
'max': self.safe_number(amountFilter, 'maxQty'),
},
'price': {
'min': self.safe_number(priceFilter, 'minPrice'),
'max': self.safe_number(priceFilter, 'maxPrice'),
},
'cost': {
'min': self.safe_number(amountFilter, 'minVal'),
'max': None,
},
},
'info': market,
}
result.append(entry)
return result
def parse_balance(self, response):
result = {
'info': response,
}
timestamp = self.safe_integer(response, 'updateTime')
        balances = self.safe_value(response, 'balances', [])
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'asset')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'free')
account['used'] = self.safe_string(balance, 'locked')
result[code] = account
result['timestamp'] = timestamp
result['datetime'] = self.iso8601(timestamp)
return self.safe_balance(result)
def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
self.load_markets()
response = self.v1PrivateGetAccount(params)
#
# {
# "makerCommission":0,
# "takerCommission":0,
# "buyerCommission":0,
# "sellerCommission":0,
# "updateTime":null,
# "balances":[
# {"asset":"sbr","free":"0","locked":"0"},
# {"asset":"ksm","free":"0","locked":"0"},
# {"asset":"neo3s","free":"0","locked":"0"},
# ],
# "canTrade":false,
# "canWithdraw":false,
# "canDeposit":false
# }
#
return self.parse_balance(response)
def fetch_order_book(self, symbol, limit=None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/en/latest/manual.html#order-book-structure>` indexed by market symbols
"""
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default 100, max 1000, see https://github.com/Bitrue-exchange/bitrue-official-api-docs#order-book
response = self.v1PublicGetDepth(self.extend(request, params))
#
# {
# "lastUpdateId":1635474910177,
# "bids":[
# ["61436.84","0.05",[]],
# ["61435.77","0.0124",[]],
# ["61434.88","0.012",[]],
# ],
# "asks":[
# ["61452.46","0.0001",[]],
# ["61452.47","0.0597",[]],
# ["61452.76","0.0713",[]],
# ]
# }
#
orderbook = self.parse_order_book(response, symbol)
orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId')
return orderbook
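    # Usage sketch (not from the original file; assumes a configured ccxt-style client):
    #
    #     exchange = ccxt.bitrue()
    #     book = exchange.fetch_order_book('BTC/USDT', limit=100)  # default 100, max 1000
    #     best_bid = book['bids'][0][0] if book['bids'] else None
    #     best_ask = book['asks'][0][0] if book['asks'] else None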
def parse_ticker(self, ticker, market=None):
#
# fetchTicker
#
# {
# "id":397945892,
# "last":"1.143411",
# "lowestAsk":"1.144223",
# "highestBid":"1.141696",
# "percentChange":"-0.001432",
# "baseVolume":"338287",
# "quoteVolume":"415013.244366",
# "isFrozen":"0",
# "high24hr":"1.370087",
# "low24hr":"1.370087",
# }
#
symbol = self.safe_symbol(None, market)
last = self.safe_string(ticker, 'last')
return self.safe_ticker({
'symbol': symbol,
'timestamp': None,
'datetime': None,
'high': self.safe_string(ticker, 'high24hr'),
'low': self.safe_string(ticker, 'low24hr'),
'bid': self.safe_string(ticker, 'highestBid'),
'bidVolume': None,
'ask': self.safe_string(ticker, 'lowestAsk'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': self.safe_string(ticker, 'percentChange'),
'average': None,
'baseVolume': self.safe_string(ticker, 'baseVolume'),
'quoteVolume': self.safe_string(ticker, 'quoteVolume'),
'info': ticker,
}, market)
def fetch_ticker(self, symbol, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
self.load_markets()
market = self.market(symbol)
uppercaseBaseId = self.safe_string_upper(market, 'baseId')
uppercaseQuoteId = self.safe_string_upper(market, 'quoteId')
request = {
'currency': uppercaseQuoteId,
'command': 'returnTicker',
}
response = self.klinePublicGetPublicCurrencyJson(self.extend(request, params))
#
# {
# "code":"200",
# "msg":"success",
# "data":{
# "DODO3S_USDT":{
# "id":397945892,
# "last":"1.143411",
# "lowestAsk":"1.144223",
# "highestBid":"1.141696",
# "percentChange":"-0.001432",
# "baseVolume":"338287",
# "quoteVolume":"415013.244366",
# "isFrozen":"0",
# "high24hr":"1.370087",
# "low24hr":"1.370087"
# }
# }
# }
#
data = self.safe_value(response, 'data', {})
id = uppercaseBaseId + '_' + uppercaseQuoteId
ticker = self.safe_value(data, id)
if ticker is None:
raise ExchangeError(self.id + ' fetchTicker() could not find the ticker for ' + market['symbol'])
return self.parse_ticker(ticker, market)
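    # Usage sketch (not from the original file; assumes a configured client and a listed market):
    #
    #     ticker = exchange.fetch_ticker('BTC/USDT')
    #     print(ticker['last'], ticker['percentage'])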
def fetch_bids_asks(self, symbols=None, params={}):
"""
fetches the bid and ask price and volume for multiple markets
:param [str]|None symbols: unified symbols of the markets to fetch the bids and asks for, all markets are returned if not assigned
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
self.load_markets()
defaultType = self.safe_string_2(self.options, 'fetchBidsAsks', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
method = None
if type == 'future':
method = 'fapiPublicGetTickerBookTicker'
elif type == 'delivery':
method = 'dapiPublicGetTickerBookTicker'
else:
method = 'publicGetTickerBookTicker'
response = getattr(self, method)(query)
return self.parse_tickers(response, symbols)
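    # The endpoint is picked from params['type'] (or options['fetchBidsAsks']['defaultType']).
    # Sketch (not from the original file):
    #
    #     exchange.fetch_bids_asks(['BTC/USDT'])                      # spot book tickers
    #     exchange.fetch_bids_asks(['BTC/USDT'], {'type': 'future'})  # routed to the fapi endpoint above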
def fetch_tickers(self, symbols=None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
:param [str]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
self.load_markets()
request = {
'command': 'returnTicker',
}
response = self.klinePublicGetPublicJson(self.extend(request, params))
#
# {
# "code":"200",
# "msg":"success",
# "data":{
# "DODO3S_USDT":{
# "id":397945892,
# "last":"1.143411",
# "lowestAsk":"1.144223",
# "highestBid":"1.141696",
# "percentChange":"-0.001432",
# "baseVolume":"338287",
# "quoteVolume":"415013.244366",
# "isFrozen":"0",
# "high24hr":"1.370087",
# "low24hr":"1.370087"
# }
# }
# }
#
data = self.safe_value(response, 'data', {})
ids = list(data.keys())
result = {}
for i in range(0, len(ids)):
id = ids[i]
baseId, quoteId = id.split('_')
marketId = baseId + quoteId
market = self.safe_market(marketId)
rawTicker = self.safe_value(data, id)
ticker = self.parse_ticker(rawTicker, market)
symbol = ticker['symbol']
result[symbol] = ticker
        return self.filter_by_array(result, 'symbol', symbols)
def parse_trade(self, trade, market=None):
#
# aggregate trades
#
# {
# "a": 26129, # Aggregate tradeId
# "p": "0.01633102", # Price
# "q": "4.70443515", # Quantity
# "f": 27781, # First tradeId
# "l": 27781, # Last tradeId
# "T": 1498793709153, # Timestamp
# "m": True, # Was the buyer the maker?
# "M": True # Was the trade the best price match?
# }
#
# recent public trades and old public trades
#
# {
# "id": 28457,
# "price": "4.00000100",
# "qty": "12.00000000",
# "time": 1499865549590,
# "isBuyerMaker": True,
# "isBestMatch": True
# }
#
# private trades
#
# {
# "symbol":"USDCUSDT",
# "id":20725156,
# "orderId":2880918576,
# "origClientOrderId":null,
# "price":"0.9996000000000000",
# "qty":"100.0000000000000000",
# "commission":null,
# "commissionAssert":null,
# "time":1635558511000,
# "isBuyer":false,
# "isMaker":false,
# "isBestMatch":true
# }
#
timestamp = self.safe_integer_2(trade, 'T', 'time')
priceString = self.safe_string_2(trade, 'p', 'price')
amountString = self.safe_string_2(trade, 'q', 'qty')
marketId = self.safe_string(trade, 'symbol')
symbol = self.safe_symbol(marketId, market)
id = self.safe_string_2(trade, 't', 'a')
id = self.safe_string_2(trade, 'id', 'tradeId', id)
side = None
orderId = self.safe_string(trade, 'orderId')
if 'm' in trade:
            side = 'sell' if trade['m'] else 'buy'  # this is reversed intentionally
elif 'isBuyerMaker' in trade:
side = 'sell' if trade['isBuyerMaker'] else 'buy'
elif 'side' in trade:
side = self.safe_string_lower(trade, 'side')
else:
if 'isBuyer' in trade:
                side = 'buy' if trade['isBuyer'] else 'sell'  # this is a true side
fee = None
if 'commission' in trade:
fee = {
'cost': self.safe_string(trade, 'commission'),
'currency': self.safe_currency_code(self.safe_string(trade, 'commissionAssert')),
}
takerOrMaker = None
if 'isMaker' in trade:
takerOrMaker = 'maker' if trade['isMaker'] else 'taker'
if 'maker' in trade:
takerOrMaker = 'maker' if trade['maker'] else 'taker'
return self.safe_trade({
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': takerOrMaker,
'price': priceString,
'amount': amountString,
'cost': None,
'fee': fee,
}, market)
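    # Worked example for the maker-flag inversion above (not in the original file):
    # 'm': True means the buyer was the maker, so the aggressing (taker) side of
    # the aggregate trade was a sell, hence side = 'sell':
    #
    #     self.parse_trade({'a': 26129, 'p': '0.016', 'q': '4.7', 'T': 1498793709153, 'm': True})
    #     # -> {..., 'side': 'sell', ...}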
def fetch_trades(self, symbol, since=None, limit=None, params={}):
"""
get the list of most recent trades for a particular symbol
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the bitrue api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'limit': 100, # default 100, max = 1000
}
method = self.safe_string(self.options, 'fetchTradesMethod', 'v1PublicGetAggTrades')
if limit is not None:
request['limit'] = limit # default 100, max 1000
#
# Caveats:
# - default limit(500) applies only if no other parameters set, trades up
# to the maximum limit may be returned to satisfy other parameters
# - if both limit and time window is set and time window contains more
# trades than the limit then the last trades from the window are returned
        # - 'tradeId' accepted and returned by this method is "aggregate" trade id
# which is different from actual trade id
# - setting both fromId and time window results in error
response = getattr(self, method)(self.extend(request, params))
#
# aggregate trades
#
# [
# {
# "a": 26129, # Aggregate tradeId
# "p": "0.01633102", # Price
# "q": "4.70443515", # Quantity
# "f": 27781, # First tradeId
# "l": 27781, # Last tradeId
# "T": 1498793709153, # Timestamp
# "m": True, # Was the buyer the maker?
# "M": True # Was the trade the best price match?
# }
# ]
#
# recent public trades and historical public trades
#
# [
# {
# "id": 28457,
# "price": "4.00000100",
# "qty": "12.00000000",
# "time": 1499865549590,
# "isBuyerMaker": True,
# "isBestMatch": True
# }
# ]
#
return self.parse_trades(response, market, since, limit)
def parse_order_status(self, status):
statuses = {
'NEW': 'open',
'PARTIALLY_FILLED': 'open',
'FILLED': 'closed',
'CANCELED': 'canceled',
'PENDING_CANCEL': 'canceling', # currently unused
'REJECTED': 'rejected',
'EXPIRED': 'expired',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "symbol":"USDCUSDT",
# "orderId":2878854881,
# "clientOrderId":"",
# "transactTime":1635551031276
# }
#
# fetchOpenOrders
#
# {
# "symbol":"USDCUSDT",
# "orderId":"2878854881",
# "clientOrderId":"",
# "price":"1.1000000000000000",
# "origQty":"100.0000000000000000",
# "executedQty":"0.0000000000000000",
# "cummulativeQuoteQty":"0.0000000000000000",
# "status":"NEW",
# "timeInForce":"",
# "type":"LIMIT",
# "side":"SELL",
# "stopPrice":"",
# "icebergQty":"",
# "time":1635551031000,
# "updateTime":1635551031000,
# "isWorking":false
# }
#
status = self.parse_order_status(self.safe_string(order, 'status'))
marketId = self.safe_string(order, 'symbol')
symbol = self.safe_symbol(marketId, market)
filled = self.safe_string(order, 'executedQty')
timestamp = None
lastTradeTimestamp = None
if 'time' in order:
timestamp = self.safe_integer(order, 'time')
elif 'transactTime' in order:
timestamp = self.safe_integer(order, 'transactTime')
elif 'updateTime' in order:
if status == 'open':
if Precise.string_gt(filled, '0'):
lastTradeTimestamp = self.safe_integer(order, 'updateTime')
else:
timestamp = self.safe_integer(order, 'updateTime')
average = self.safe_string(order, 'avgPrice')
price = self.safe_string(order, 'price')
amount = self.safe_string(order, 'origQty')
# - Spot/Margin market: cummulativeQuoteQty
# - Futures market: cumQuote.
        # Note this is not the actual cost, since Binance futures uses leverage to calculate margins.
cost = self.safe_string_2(order, 'cummulativeQuoteQty', 'cumQuote')
id = self.safe_string(order, 'orderId')
type = self.safe_string_lower(order, 'type')
side = self.safe_string_lower(order, 'side')
fills = self.safe_value(order, 'fills', [])
clientOrderId = self.safe_string(order, 'clientOrderId')
timeInForce = self.safe_string(order, 'timeInForce')
postOnly = (type == 'limit_maker') or (timeInForce == 'GTX')
if type == 'limit_maker':
type = 'limit'
stopPriceString = self.safe_string(order, 'stopPrice')
stopPrice = self.parse_number(self.omit_zero(stopPriceString))
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': postOnly,
'side': side,
'price': price,
'stopPrice': stopPrice,
'amount': amount,
'cost': cost,
'average': average,
'filled': filled,
'remaining': None,
'status': status,
'fee': None,
'trades': fills,
}, market)
def create_order(self, symbol, type, side, amount, price=None, params={}):
"""
create a trade order
:param str symbol: unified symbol of the market to create an order in
:param str type: 'market' or 'limit'
:param str side: 'buy' or 'sell'
:param float amount: how much of currency you want to trade in units of base currency
        :param float price: the price at which the order is to be fulfilled, in units of the quote currency, ignored in market orders
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: an `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
self.load_markets()
market = self.market(symbol)
uppercaseType = type.upper()
validOrderTypes = self.safe_value(market['info'], 'orderTypes')
if not self.in_array(uppercaseType, validOrderTypes):
raise InvalidOrder(self.id + ' ' + type + ' is not a valid order type in market ' + symbol)
request = {
'symbol': market['id'],
'side': side.upper(),
'type': uppercaseType,
# 'timeInForce': '',
'quantity': self.amount_to_precision(symbol, amount),
# 'price': self.price_to_precision(symbol, price),
# 'newClientOrderId': clientOrderId, # automatically generated if not sent
# 'stopPrice': self.price_to_precision(symbol, 'stopPrice'),
# 'icebergQty': self.amount_to_precision(symbol, icebergQty),
}
clientOrderId = self.safe_string_2(params, 'newClientOrderId', 'clientOrderId')
if clientOrderId is not None:
params = self.omit(params, ['newClientOrderId', 'clientOrderId'])
request['newClientOrderId'] = clientOrderId
if uppercaseType == 'LIMIT':
if price is None:
raise InvalidOrder(self.id + ' createOrder() requires a price argument')
request['price'] = self.price_to_precision(symbol, price)
stopPrice = self.safe_number(params, 'stopPrice')
if stopPrice is not None:
params = self.omit(params, 'stopPrice')
request['stopPrice'] = self.price_to_precision(symbol, stopPrice)
response = self.v1PrivatePostOrder(self.extend(request, params))
#
# {
# "symbol":"USDCUSDT",
# "orderId":2878854881,
# "clientOrderId":"",
# "transactTime":1635551031276
# }
#
return self.parse_order(response, market)
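    # Usage sketch (not from the original file; assumes an authenticated client and
    # that the market lists the LIMIT type in its orderTypes):
    #
    #     order = exchange.create_order('BTC/USDT', 'limit', 'buy', 0.001, 60000)
    #     # a stopPrice passed via params is forwarded to the request above:
    #     order = exchange.create_order('BTC/USDT', 'limit', 'sell', 0.001, 59000, {'stopPrice': 59500})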
def fetch_order(self, id, symbol=None, params={}):
"""
fetches information on an order made by the user
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
clientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId')
if clientOrderId is not None:
request['origClientOrderId'] = clientOrderId
else:
request['orderId'] = id
query = self.omit(params, ['type', 'clientOrderId', 'origClientOrderId'])
response = self.v1PrivateGetOrder(self.extend(request, query))
return self.parse_order(response, market)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchClosedOrders() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'orderId': 123445, # long
# 'startTime': since,
# 'endTime': self.milliseconds(),
# 'limit': limit, # default 100, max 1000
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit # default 100, max 1000
response = self.v1PrivateGetAllOrders(self.extend(request, params))
#
# [
# {
# "symbol": "LTCBTC",
# "orderId": 1,
# "clientOrderId": "myOrder1",
# "price": "0.1",
# "origQty": "1.0",
# "executedQty": "0.0",
# "cummulativeQuoteQty": "0.0",
# "status": "NEW",
# "timeInForce": "GTC",
# "type": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "time": 1499827319559,
# "updateTime": 1499827319559,
# "isWorking": True
# }
# ]
#
return self.parse_orders(response, market, since, limit)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all unfilled currently open orders
:param str symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch open orders for
:param int|None limit: the maximum number of open orders structures to retrieve
:param dict params: extra parameters specific to the bitrue api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOpenOrders() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.v1PrivateGetOpenOrders(self.extend(request, params))
#
# [
# {
# "symbol":"USDCUSDT",
# "orderId":"2878854881",
# "clientOrderId":"",
# "price":"1.1000000000000000",
# "origQty":"100.0000000000000000",
# "executedQty":"0.0000000000000000",
# "cummulativeQuoteQty":"0.0000000000000000",
# "status":"NEW",
# "timeInForce":"",
# "type":"LIMIT",
# "side":"SELL",
# "stopPrice":"",
# "icebergQty":"",
# "time":1635551031000,
# "updateTime":1635551031000,
# "isWorking":false
# }
# ]
#
return self.parse_orders(response, market, since, limit)
def cancel_order(self, id, symbol=None, params={}):
"""
cancels an open order
:param str id: order id
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
origClientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId')
request = {
'symbol': market['id'],
# 'orderId': id,
# 'origClientOrderId': id,
# 'newClientOrderId': id,
}
if origClientOrderId is None:
request['orderId'] = id
else:
request['origClientOrderId'] = origClientOrderId
query = self.omit(params, ['type', 'origClientOrderId', 'clientOrderId'])
response = self.v1PrivateDeleteOrder(self.extend(request, query))
#
# {
# "symbol": "LTCBTC",
# "origClientOrderId": "myOrder1",
# "orderId": 1,
# "clientOrderId": "cancelMyOrder1"
# }
#
return self.parse_order(response, market)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all trades made by the user
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades structures to retrieve
:param dict params: extra parameters specific to the bitrue api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html#trade-structure>`
"""
method = self.safe_string(self.options, 'fetchMyTradesMethod', 'v2PrivateGetMyTrades')
if (symbol is None) and (method == 'v2PrivateGetMyTrades'):
raise ArgumentsRequired(self.id + ' v2PrivateGetMyTrades() requires a symbol argument')
self.load_markets()
request = {
# 'symbol': market['id'],
# 'startTime': since,
# 'endTime': self.milliseconds(),
# 'fromId': 12345, # trade id to fetch from, most recent trades by default
# 'limit': limit, # default 100, max 1000
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit
response = getattr(self, method)(self.extend(request, params))
#
# [
# {
# "symbol":"USDCUSDT",
# "id":20725156,
# "orderId":2880918576,
# "origClientOrderId":null,
# "price":"0.9996000000000000",
# "qty":"100.0000000000000000",
# "commission":null,
# "commissionAssert":null,
# "time":1635558511000,
# "isBuyer":false,
# "isMaker":false,
# "isBestMatch":true
# }
# ]
#
return self.parse_trades(response, market, since, limit)
def fetch_deposits(self, code=None, since=None, limit=None, params={}):
"""
fetch all deposits made to an account
:param str code: unified currency code
:param int|None since: the earliest time in ms to fetch deposits for
:param int|None limit: the maximum number of deposits structures to retrieve
:param dict params: extra parameters specific to the bitrue api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchDeposits() requires a code argument')
self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
'status': 1, # 0 init, 1 finished, default 0
# 'offset': 0,
# 'limit': limit, # default 10, max 1000
# 'startTime': since,
# 'endTime': self.milliseconds(),
}
if since is not None:
request['startTime'] = since
# request['endTime'] = self.sum(since, 7776000000)
if limit is not None:
request['limit'] = limit
response = self.v1PrivateGetDepositHistory(self.extend(request, params))
#
# {
# "code":200,
# "msg":"succ",
# "data":[
# {
# "id":2659137,
# "symbol":"USDC",
# "amount":"200.0000000000000000",
# "fee":"0.0E-15",
# "createdAt":1635503169000,
# "updatedAt":1635503202000,
# "addressFrom":"0x2faf487a4414fe77e2327f0bf4ae2a264a776ad2",
# "addressTo":"0x190ceccb1f8bfbec1749180f0ba8922b488d865b",
# "txid":"0x9970aec41099ac385568859517308707bc7d716df8dabae7b52f5b17351c3ed0",
# "confirmations":5,
# "status":0,
# "tagType":null,
# },
# {
# "id":2659137,
# "symbol": "XRP",
# "amount": "20.0000000000000000",
# "fee": "0.0E-15",
# "createdAt": 1544669393000,
# "updatedAt": 1544669413000,
# "addressFrom": "",
# "addressTo": "raLPjTYeGezfdb6crXZzcC8RkLBEwbBHJ5_18113641",
# "txid": "515B23E1F9864D3AF7F5B4C4FCBED784BAE861854FAB95F4031922B6AAEFC7AC",
# "confirmations": 7,
# "status": 1,
# "tagType": "Tag"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_transactions(data, currency, since, limit)
def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
"""
fetch all withdrawals made from an account
:param str code: unified currency code
:param int|None since: the earliest time in ms to fetch withdrawals for
:param int|None limit: the maximum number of withdrawals structures to retrieve
:param dict params: extra parameters specific to the bitrue api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchWithdrawals() requires a code argument')
self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
'status': 5, # 0 init, 5 finished, 6 canceled, default 0
# 'offset': 0,
# 'limit': limit, # default 10, max 1000
# 'startTime': since,
# 'endTime': self.milliseconds(),
}
if since is not None:
request['startTime'] = since
# request['endTime'] = self.sum(since, 7776000000)
if limit is not None:
request['limit'] = limit
response = self.v1PrivateGetWithdrawHistory(self.extend(request, params))
#
# {
# "code": 200,
# "msg": "succ",
# "data": {
# "msg": null,
# "amount": 1000,
# "fee": 1,
# "ctime": null,
# "coin": "usdt_erc20",
# "addressTo": "0x2edfae3878d7b6db70ce4abed177ab2636f60c83"
# }
# }
#
        data = self.safe_value(response, 'data', [])
        return self.parse_transactions(data, currency, since, limit)
def parse_transaction_status_by_type(self, status, type=None):
statusesByType = {
'deposit': {
'0': 'pending',
'1': 'ok',
},
'withdrawal': {
'0': 'pending', # Email Sent
                '5': 'ok',  # Finished
'6': 'canceled',
},
}
statuses = self.safe_value(statusesByType, type, {})
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits
#
# {
# "symbol": "XRP",
# "amount": "261.3361000000000000",
# "fee": "0.0E-15",
# "createdAt": 1548816979000,
# "updatedAt": 1548816999000,
# "addressFrom": "",
# "addressTo": "raLPjTYeGezfdb6crXZzcC8RkLBEwbBHJ5_18113641",
# "txid": "86D6EB68A7A28938BCE06BD348F8C07DEF500C5F7FE92069EF8C0551CE0F2C7D",
# "confirmations": 8,
# "status": 1,
# "tagType": "Tag"
# },
# {
# "symbol": "XRP",
# "amount": "20.0000000000000000",
# "fee": "0.0E-15",
# "createdAt": 1544669393000,
# "updatedAt": 1544669413000,
# "addressFrom": "",
# "addressTo": "raLPjTYeGezfdb6crXZzcC8RkLBEwbBHJ5_18113641",
# "txid": "515B23E1F9864D3AF7F5B4C4FCBED784BAE861854FAB95F4031922B6AAEFC7AC",
# "confirmations": 7,
# "status": 1,
# "tagType": "Tag"
# }
#
# fetchWithdrawals
#
# {
# "id": 183745,
# "symbol": "usdt_erc20",
# "amount": "8.4000000000000000",
# "fee": "1.6000000000000000",
# "payAmount": "0.0000000000000000",
# "createdAt": 1595336441000,
# "updatedAt": 1595336576000,
# "addressFrom": "",
# "addressTo": "0x2edfae3878d7b6db70ce4abed177ab2636f60c83",
# "txid": "",
# "confirmations": 0,
# "status": 6,
# "tagType": null
# }
#
# withdraw
#
# {
# "msg": null,
# "amount": 1000,
# "fee": 1,
# "ctime": null,
# "coin": "usdt_erc20",
# "addressTo": "0x2edfae3878d7b6db70ce4abed177ab2636f60c83"
# }
#
id = self.safe_string(transaction, 'id')
tagType = self.safe_string(transaction, 'tagType')
addressTo = self.safe_string(transaction, 'addressTo')
addressFrom = self.safe_string(transaction, 'addressFrom')
tagTo = None
tagFrom = None
if tagType is not None:
if addressTo is not None:
parts = addressTo.split('_')
addressTo = self.safe_string(parts, 0)
tagTo = self.safe_string(parts, 1)
if addressFrom is not None:
parts = addressFrom.split('_')
addressFrom = self.safe_string(parts, 0)
tagFrom = self.safe_string(parts, 1)
txid = self.safe_string(transaction, 'txid')
timestamp = self.safe_integer(transaction, 'createdAt')
updated = self.safe_integer(transaction, 'updatedAt')
payAmount = ('payAmount' in transaction)
ctime = ('ctime' in transaction)
type = 'withdrawal' if (payAmount or ctime) else 'deposit'
status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type)
amount = self.safe_number(transaction, 'amount')
network = None
currencyId = self.safe_string(transaction, 'symbol')
if currencyId is not None:
parts = currencyId.split('_')
currencyId = self.safe_string(parts, 0)
networkId = self.safe_string(parts, 1)
if networkId is not None:
network = networkId.upper()
code = self.safe_currency_code(currencyId, currency)
feeCost = self.safe_number(transaction, 'fee')
fee = None
if feeCost is not None:
fee = {'currency': code, 'cost': feeCost}
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'network': network,
'address': addressTo,
'addressTo': addressTo,
'addressFrom': addressFrom,
'tag': tagTo,
'tagTo': tagTo,
'tagFrom': tagFrom,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'internal': False,
'fee': fee,
}
def withdraw(self, code, amount, address, tag=None, params={}):
"""
make a withdrawal
:param str code: unified currency code
:param float amount: the amount to withdraw
:param str address: the address to withdraw to
        :param str|None tag: an optional tag(memo) required by some chains for the destination address
:param dict params: extra parameters specific to the bitrue api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
tag, params = self.handle_withdraw_tag_and_params(tag, params)
self.check_address(address)
self.load_markets()
currency = self.currency(code)
chainName = self.safe_string(params, 'chainName')
if chainName is None:
networks = self.safe_value(currency, 'networks', {})
            network = self.safe_string_upper(params, 'network')  # this line allows the user to specify either ERC20 or ETH
networkEntry = self.safe_value(networks, network, {})
chainName = self.safe_string(networkEntry, 'id') # handle ERC20>ETH alias
if chainName is None:
raise ArgumentsRequired(self.id + ' withdraw() requires a network parameter or a chainName parameter')
params = self.omit(params, 'network')
request = {
'coin': currency['id'].upper(),
'amount': amount,
'addressTo': address,
'chainName': chainName, # 'ERC20', 'TRC20', 'SOL'
# 'addressMark': '', # mark of address
# 'addrType': '', # type of address
# 'tag': tag,
}
if tag is not None:
request['tag'] = tag
response = self.v1PrivatePostWithdrawCommit(self.extend(request, params))
# {id: '9a67628b16ba4988ae20d329333f16bc'}
return self.parse_transaction(response, currency)
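    # Usage sketch (not from the original file; the address is a placeholder):
    # either a unified 'network' code or an exchange-specific 'chainName' must be
    # supplied, per the checks above:
    #
    #     exchange.withdraw('USDT', 10, '0xabc...', params={'network': 'ERC20'})
    #     exchange.withdraw('USDT', 10, '0xabc...', params={'chainName': 'ERC20'})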
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
version, access = api
url = self.urls['api'][version] + '/' + self.implode_params(path, params)
params = self.omit(params, self.extract_params(path))
if access == 'private':
self.check_required_credentials()
recvWindow = self.safe_integer(self.options, 'recvWindow', 5000)
query = self.urlencode(self.extend({
'timestamp': self.nonce(),
'recvWindow': recvWindow,
}, params))
signature = self.hmac(self.encode(query), self.encode(self.secret))
query += '&' + 'signature=' + signature
headers = {
'X-MBX-APIKEY': self.apiKey,
}
if (method == 'GET') or (method == 'DELETE'):
url += '?' + query
else:
body = query
headers['Content-Type'] = 'application/x-www-form-urlencoded'
else:
if params:
url += '?' + self.urlencode(params)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
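    # Sketch of the signing scheme above (not from the original file; assumes the
    # base class default of HMAC-SHA256 with a hex digest):
    #
    #     import hmac, hashlib
    #     from urllib.parse import urlencode
    #     query = urlencode({'timestamp': 1635551031276, 'recvWindow': 5000, 'symbol': 'BTCUSDT'})
    #     sig = hmac.new(secret.encode(), query.encode(), hashlib.sha256).hexdigest()
    #     # sent with the X-MBX-APIKEY header and '&signature=' + sig appended to the query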
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if (code == 418) or (code == 429):
raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body)
# error response in a form: {"code": -1013, "msg": "Invalid quantity."}
        # the following block contains legacy checks against message patterns in the "msg" property
        # we will switch to "code" checks eventually, when we know all of them
if code >= 400:
if body.find('Price * QTY is zero or less') >= 0:
raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body)
if body.find('LOT_SIZE') >= 0:
raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body)
if body.find('PRICE_FILTER') >= 0:
raise InvalidOrder(self.id + ' order price is invalid, i.e. exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body)
if response is None:
return # fallback to default error handler
# check success value for wapi endpoints
# response in format {'msg': 'The coin does not exist.', 'success': True/false}
success = self.safe_value(response, 'success', True)
if not success:
message = self.safe_string(response, 'msg')
parsedMessage = None
if message is not None:
try:
parsedMessage = json.loads(message)
except Exception as e:
# do nothing
parsedMessage = None
if parsedMessage is not None:
response = parsedMessage
message = self.safe_string(response, 'msg')
if message is not None:
self.throw_exactly_matched_exception(self.exceptions['exact'], message, self.id + ' ' + message)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, self.id + ' ' + message)
# checks against error codes
error = self.safe_string(response, 'code')
if error is not None:
# https://github.com/ccxt/ccxt/issues/6501
# https://github.com/ccxt/ccxt/issues/7742
if (error == '200') or Precise.string_equals(error, '0'):
return
# a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."}
# despite that their message is very confusing, it is raised by Binance
# on a temporary ban, the API key is valid, but disabled for a while
if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']:
raise DDoSProtection(self.id + ' temporary banned: ' + body)
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], error, feedback)
raise ExchangeError(feedback)
if not success:
raise ExchangeError(self.id + ' ' + body)
def calculate_rate_limiter_cost(self, api, method, path, params, config={}, context={}):
if ('noSymbol' in config) and not ('symbol' in params):
return config['noSymbol']
elif ('byLimit' in config) and ('limit' in params):
limit = params['limit']
byLimit = config['byLimit']
for i in range(0, len(byLimit)):
entry = byLimit[i]
if limit <= entry[0]:
return entry[1]
return self.safe_integer(config, 'cost', 1)
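    # Worked example for the byLimit lookup above (not in the original file):
    #
    #     config = {'byLimit': [[100, 1], [500, 5], [1000, 10]]}
    #     # params = {'limit': 300}  -> first entry with 300 <= entry[0] is [500, 5] -> cost 5
    #     # params = {'limit': 2000} -> no entry matches -> falls through to config['cost'] (default 1)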
# =============================================================================
# Source: windiboy/arm_gazebo - robotiq_gripper/robotiq_control/scripts/cmodel_tcp_driver.py
# License: MIT
# =============================================================================
#!/usr/bin/env python
import os
import sys
import socket
import rospy
from robotiq_control.cmodel_base import RobotiqCModel, ComModbusTcp
from robotiq_msgs.msg import CModelCommand, CModelStatus
def mainLoop(address):
# Gripper is a C-Model with a TCP connection
gripper = RobotiqCModel()
gripper.client = ComModbusTcp()
# We connect to the address received as an argument
gripper.client.connectToDevice(address)
# The Gripper status
pub = rospy.Publisher('status', CModelStatus, queue_size=3)
# The Gripper command
rospy.Subscriber('command', CModelCommand, gripper.refreshCommand)
while not rospy.is_shutdown():
# Get and publish the Gripper status
status = gripper.getStatus()
pub.publish(status)
# Wait a little
rospy.sleep(0.05)
# Send the most recent command
gripper.sendCommand()
# Wait a little
rospy.sleep(0.05)
if __name__ == '__main__':
rospy.init_node('cmodel_tcp_driver')
# Verify user gave a legal IP address
try:
ip = sys.argv[1]
socket.inet_aton(ip)
  except socket.error:
    rospy.logfatal('[cmodel_tcp_driver] Please provide a valid IP address')
    sys.exit(1)
# Run the main loop
try:
mainLoop(sys.argv[1])
except rospy.ROSInterruptException: pass
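# Usage sketch (not part of the original script; the package name is assumed from
# the file path): pass the gripper's IP address as the first argument, e.g.
#
#     rosrun robotiq_control cmodel_tcp_driver.py 192.168.1.11
#
# The node publishes CModelStatus on 'status' and subscribes to CModelCommand
# on 'command', as wired up in mainLoop() above.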
# =============================================================================
# Source: dummymael/learn_python - lpthw/03/ex3.py
# License: BSD-2-Clause
# =============================================================================
print "I will now count my chickens:"
print "Hens", 25.0 + 30.0 / 6.0
print "Roosters", 100.0 - 25.0 * 3.0 % 4.0
print "Now I will count the eggs:"
print 3 + 2 + 1 - 5 + 4.0 % 2.0 - 1.0 / 4.0 + 6
print "Is it true that 3 + 2 < 5 - 7?"
print 3 + 2 < 5 - 7
print "What is 3 + 2?", 3 + 2
print "What is 5 - 7?", 5 - 7
print "Oh, that's why it's False."
print "How about some more."
print "Is it greater?", 5 > -2
print "Is it greater or equal?", 5 >= -2
print "Is it lesser or equal?", 5 <= -2
print "100 % 16 is", 100 % 16
print "7 / 4 is", 7 / 4
print "7.0 / 4.0 is", 7.0 / 4.0
# =============================================================================
# Source: jhurd-tc/threatconnect-python - threatconnect/IndicatorFilterMethods.py
# License: Apache-2.0
# =============================================================================
""" standard """
import time
""" third-party """
import dateutil.parser
import pytz
""" custom """
from Config.FilterOperator import FilterOperator
import ApiProperties
import SharedMethods
from Config.ResourceType import ResourceType
from ErrorCodes import ErrorCodes
from PostFilterObject import PostFilterObject
from RequestObject import RequestObject
def add_adversary_id(self, data_int):
""" filter api results by adversary id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4000.value.format(data_int))
prop = self._resource_properties['groups']
ro = RequestObject()
ro.set_description('api filter by adversary id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], ['adversaries', data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_campaign_id(self, data_int):
""" filter api results by campaign id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4005.value.format(data_int))
prop = self._resource_properties['groups']
ro = RequestObject()
ro.set_description('api filter by campaign id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], ['campaigns', data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
# def add_custom_type_id(self, data_int):
# # validation of data input
# if not isinstance(data_int, int):
# raise AttributeError(ErrorCodes.e4000.value.format(data_int))
#
# prop = self._resource_properties['groups']
# ro = RequestObject()
# ro.set_description('api filter by adversary id {0}'.format(data_int))
# ro.set_http_method(prop['http_method'])
# ro.set_owner_allowed(prop['owner_allowed'])
# ro.set_request_uri(prop['uri'], ['adversaries', data_int])
# ro.set_resource_pagination(prop['pagination'])
# ro.set_resource_type(self._resource_type)
# self._add_request_objects(ro)
def add_document_id(self, data_int):
""" filter api results by document id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4020.value.format(data_int))
prop = self._resource_properties['groups']
ro = RequestObject()
ro.set_description('api filter by documents id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], ['documents', data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_email_id(self, data_int):
""" filter api results by email id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4030.value.format(data_int))
prop = self._resource_properties['groups']
ro = RequestObject()
ro.set_description('api filter by email id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], ['emails', data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_incident_id(self, data_int):
""" filter api results by incident id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4040.value.format(data_int))
prop = self._resource_properties['groups']
ro = RequestObject()
ro.set_description('api filter by incident id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], ['incidents', data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_indicator(self, data, indicator_type=None):
""" filter api results by indicator """
# validation indicator
if not SharedMethods.validate_indicator(self.tc._indicators_regex, data):
raise AttributeError(ErrorCodes.e5010.value.format(data))
# get indicator uri attribute
if indicator_type is None:
indicator_type = SharedMethods.get_resource_type(self.tc._indicators_regex, data)
indicator_uri_attribute = ApiProperties.api_properties[indicator_type.name]['uri_attribute']
prop = self._resource_properties['indicator']
ro = RequestObject()
ro.set_description('api filter by indicator id {0}'.format(data))
ro.set_owner_allowed(prop['owner_allowed'])
# TODO: Check this logic
if self._resource_type == ResourceType.INDICATORS:
ro.set_request_uri(prop['uri'], [indicator_uri_attribute, SharedMethods.urlsafe(data)])
else:
ro.set_request_uri(prop['uri'], [SharedMethods.urlsafe(data)])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(indicator_type)
self._add_request_objects(ro)
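# Usage sketch (not from the original file; assumes the SDK's usual filter
# workflow with a configured ThreatConnect client named `tc`):
#
#     indicators = tc.indicators()
#     filter1 = indicators.add_filter()
#     filter1.add_indicator('10.20.30.40')  # the type is inferred via the regex validation above
#     indicators.retrieve()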
def add_security_label(self, data):
""" filter api results by security label """
# validation of data input
if not isinstance(data, str):
raise AttributeError(ErrorCodes.e4050.value.format(data))
prop = self._resource_properties['security_labels']
ro = RequestObject()
ro.set_description('api filter by security label "{0}"'.format(data))
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_resource_pagination(prop['pagination'])
ro.set_request_uri(prop['uri'], [SharedMethods.urlsafe(data)])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_signature_id(self, data_int):
""" filter api results by signature id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4060.value.format(data_int))
prop = self._resource_properties['groups']
ro = RequestObject()
ro.set_description('api filter by signature id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], ['signatures', data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_tag(self, data):
""" filter api results by tag """
# validation of data input
if not isinstance(data, str):
raise AttributeError(ErrorCodes.e4070.value.format(data))
prop = self._resource_properties['tags']
ro = RequestObject()
ro.set_description('api filter by tag "{0}"'.format(data))
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_resource_pagination(prop['pagination'])
ro.set_request_uri(prop['uri'], [SharedMethods.urlsafe(data)])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_task_id(self, data_int):
""" """
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4080.value.format(data_int))
prop = self._resource_properties['tasks']
ro = RequestObject()
ro.set_description('api filter by tasks id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], [data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_threat_id(self, data_int):
""" filter api results by threat id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4080.value.format(data_int))
prop = self._resource_properties['groups']
ro = RequestObject()
ro.set_description('api filter by threat id {0}'.format(data_int))
ro.set_http_method(prop['http_method'])
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_request_uri(prop['uri'], ['threats', data_int])
ro.set_resource_pagination(prop['pagination'])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
def add_victim_id(self, data_int):
""" filter api results by victim id """
# validation of data input
if not isinstance(data_int, int):
raise AttributeError(ErrorCodes.e4090.value.format(data_int))
prop = self._resource_properties['victims']
ro = RequestObject()
ro.set_description('api filter by victim id {0}'.format(data_int))
ro.set_owner_allowed(prop['owner_allowed'])
ro.set_resource_pagination(prop['pagination'])
ro.set_request_uri(prop['uri'], [data_int])
ro.set_resource_type(self._resource_type)
self._add_request_objects(ro)
#
# Post Filters
#
def add_pf_attribute(self, data, operator=FilterOperator.EQ):
""" add post filter by attribute
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by attribute {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_attribute')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_confidence(self, data, operator=FilterOperator.EQ):
""" add post filter by confidence
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by confidence {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_confidence')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_date_added(self, data_date, operator=FilterOperator.EQ):
""" add post filter by date
:type operator: FilterOperator
"""
# properly format date
date_added = data_date
date_added = dateutil.parser.parse(date_added).astimezone(pytz.utc)
date_added_seconds = int(time.mktime(date_added.timetuple()))
post_filter = PostFilterObject()
post_filter.set_description('post filter by date added {0} {1} seconds'.format(operator.name, date_added_seconds))
post_filter.set_method('filter_date_added')
post_filter.set_filter(date_added_seconds)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
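# Worked example (not in the original file): '2017-01-02T00:00:00Z' parses to an
# aware UTC datetime and is reduced to epoch seconds for the comparison. Note
# that time.mktime() interprets the tuple as local time, so the value only
# matches true UTC epoch seconds on hosts whose local timezone is UTC.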
def add_pf_file_type(self, data, operator=FilterOperator.EQ):
""" add post filter by file type
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by file type {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_file_type')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_last_modified(self, data_date, operator=FilterOperator.EQ):
""" add post filter by last modified
:type operator: FilterOperator
"""
# properly format date
last_modified = data_date
last_modified = dateutil.parser.parse(last_modified).astimezone(pytz.utc)
last_modified_seconds = int(time.mktime(last_modified.timetuple()))
post_filter = PostFilterObject()
post_filter.set_description('post filter by last modified {0} {1} seconds'.format(
operator.name, last_modified_seconds))
post_filter.set_method('filter_last_modified')
post_filter.set_filter(last_modified_seconds)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_rating(self, data, operator=FilterOperator.EQ):
""" add post filter by rating
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by rating {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_rating')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_threat_assess_confidence(self, data, operator=FilterOperator.EQ):
""" add post filter by threat assesses confidence
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by threat assess confidence {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_threat_assess_confidence')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_threat_assess_rating(self, data, operator=FilterOperator.EQ):
""" add post filter by threat assesses rating
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by threat assess rating {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_threat_assess_rating')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_tag(self, data, operator=FilterOperator.EQ):
""" add post filter by tag
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by tag {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_tag')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
def add_pf_type(self, data, operator=FilterOperator.EQ):
""" add post filter by type
:type operator: FilterOperator
"""
post_filter = PostFilterObject()
post_filter.set_description('post filter by type {0} {1}'.format(operator.name, data))
post_filter.set_method('filter_type')
post_filter.set_filter(data)
post_filter.set_operator(operator)
self.add_post_filter(post_filter)
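# --- illustrative usage (not part of the original SDK source) ---
# A hedged sketch of how filters like the ones above are typically driven in
# the old ThreatConnect Python SDK; the ThreatConnect constructor arguments
# and the indicators() resource are assumptions and may differ per SDK version.
#
#   tc = ThreatConnect(api_access_id, api_secret_key, api_default_org, api_base_url)
#   indicators = tc.indicators()
#   flt = indicators.add_filter()
#   flt.add_threat_id(123)                       # API-side filter
#   flt.add_pf_rating('2.5', FilterOperator.GE)  # client-side post filter
#   indicators.retrieve()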
| 36.831135 | 118 | 0.731141 |
793eecc0add50c889c52b908505f25194db86009 | 3,512 | py | Python | pytools/pic/tile_initialization.py | Krissmedt/imprunko | 94171d0d47171cc4b199cd52f5f29385cbff903e | [
"MIT"
] | null | null | null | pytools/pic/tile_initialization.py | Krissmedt/imprunko | 94171d0d47171cc4b199cd52f5f29385cbff903e | [
"MIT"
] | null | null | null | pytools/pic/tile_initialization.py | Krissmedt/imprunko | 94171d0d47171cc4b199cd52f5f29385cbff903e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import pyrunko.pic as pypic
def ind2loc(gridI, tileI, conf):
# grid coordinates
i, j, k = gridI
Nx = conf.Nx
Ny = conf.Ny
Nz = conf.Nz
# tile coordinates
l, m, n = tileI
NxMesh = conf.NxMesh
NyMesh = conf.NyMesh
NzMesh = conf.NzMesh
# grid spacing; start point + step
xmin = conf.xmin
ymin = conf.ymin
zmin = conf.zmin
dx = 1.0 # conf.dx
dy = 1.0 # conf.dy
dz = 1.0 # conf.dz
# calculate coordinate extent
x = xmin + i * (NxMesh) * dx + l * dx
y = ymin + j * (NyMesh) * dy + m * dy
z = zmin + k * (NzMesh) * dz + n * dz
return [x, y, z]
def initialize_tile(tile, indx, n, conf):
# set parameters
tile.cfl = conf.cfl
ppc = conf.ppc # / conf.Nspecies
# load particle containers
for sps in range(conf.Nspecies):
if conf.threeD:
container = pypic.threeD.ParticleContainer()
elif conf.twoD:
container = pypic.twoD.ParticleContainer()
# alternate injection between - and + charged prtcls
if sps % 2 == 0:
container.q = -conf.qe
else:
container.q = -conf.qi
# reserve memory for particles
Nprtcls = conf.NxMesh * conf.NyMesh * conf.NzMesh * conf.ppc
container.reserve(Nprtcls)
tile.set_container(container)
# set bounding box of the tile
mins = ind2loc(indx, (0, 0, 0), conf)
maxs = ind2loc(indx, (conf.NxMesh, conf.NyMesh, conf.NzMesh), conf)
if conf.threeD:
tile.set_tile_mins(mins[0:3])
tile.set_tile_maxs(maxs[0:3])
elif conf.twoD:
tile.set_tile_mins(mins[0:2])
tile.set_tile_maxs(maxs[0:2])
return
# load virtual tiles
def load_virtual_tiles(n, conf):
for cid in n.get_virtual_tiles():
tile_orig = n.get_tile(cid)
ind = tile_orig.index
# new prtcl tile;
# TODO: load_metainfo *HAS* to be after add_tile because
# add_tile modifies tile content.
if conf.threeD:
i,j,k = ind
tile = pypic.threeD.Tile(conf.NxMesh, conf.NyMesh, conf.NzMesh)
n.add_tile(tile, ind)
tile.load_metainfo(tile_orig.communication)
initialize_tile(tile, (i,j,k), n, conf)
elif conf.twoD:
i,j = ind
tile = pypic.twoD.Tile(conf.NxMesh, conf.NyMesh, conf.NzMesh)
n.add_tile(tile, ind)
tile.load_metainfo(tile_orig.communication)
initialize_tile(tile, (i,j,0), n, conf)
return
# 3D loading of pic tiles into grid
def load_tiles(n, conf):
for k in range(n.get_Nz()):
for j in range(n.get_Ny()):
for i in range(n.get_Nx()):
# print("{} ({},{}) {} ?= {}".format(n.rank, i,j, n.get_mpi_grid(i,j), ref[j,i]))
if conf.threeD:
if n.get_mpi_grid(i, j, k) == n.rank():
tile = pypic.threeD.Tile(conf.NxMesh, conf.NyMesh, conf.NzMesh)
ind = (i, j, k)
initialize_tile(tile, (i,j,k), n, conf)
n.add_tile(tile, ind)
elif conf.twoD:
if n.get_mpi_grid(i, j) == n.rank():
tile = pypic.twoD.Tile(conf.NxMesh, conf.NyMesh, conf.NzMesh)
ind = (i, j)
initialize_tile(tile, (i,j,k), n, conf)
n.add_tile(tile, ind)
return
| 26.014815 | 97 | 0.537301 |
793ef14f436e1debe2b85efd8006e8c4f2bf1489 | 16,128 | py | Python | data_collection/apache.py | pinckert/pinckert.com | 7d6ce3e37c6c39c1deebbceb40f4442d34c20e23 | [
"Apache-2.0"
] | 1 | 2020-07-29T22:45:07.000Z | 2020-07-29T22:45:07.000Z | data_collection/apache.py | pinckert/pinckert.com | 7d6ce3e37c6c39c1deebbceb40f4442d34c20e23 | [
"Apache-2.0"
] | null | null | null | data_collection/apache.py | pinckert/pinckert.com | 7d6ce3e37c6c39c1deebbceb40f4442d34c20e23 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#from __future__ import print_function
import os
import sys
import urllib
import xml.etree.ElementTree as ET
import time
"""
Attempted using jenkinsapi, but after ~10 minutes, the following request failed on the apache build server...
import jenkinsapi
from jenkinsapi.jenkins import Jenkins
jenkins = Jenkins('https://builds.apache.org')
# Print all jobs in Jenkins
print(jenkins.items())
ERROR:root:Failed request at https://builds.apache.org/job/FlexJS Pipeline/job/f
eature-autobuild%2Fcleanup/api/python with params: None
...
File "C:\Python27\lib\jenkinsapi\jenkinsbase.py", line 83, in get_data
response.raise_for_status()
File "C:\Python27\lib\site-packages\requests\models.py", line 909, in raise_fo
r_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://buil
ds.apache.org/job/FlexJS%20Pipeline/job/feature-autobuild%2Fcleanup/api/python
"""
#
# Approach:
# Save a hierarchy of xml files that will be imported to DB on the server side.
# For each view, job, server, etc. provide a method to return a dict. of ID's and URL's
# Generate the filename/directory from the ID and use the URL to retrieve the .xml for the given resource
#
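#
# Illustrative on-disk layout this produces (an assumption pieced together
# from createDirectoryStructure() and the main loop below; actual names
# depend on the configured views and jobs):
#
#   A-D/
#       views/  servers/  users/           created by createDirectoryStructure
#       <JobName>/<JobName>.xml            job description saved by saveXML
#       <JobName>/<build number>.xml       one file per build
#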
# Global constant
base_urls = {
"apache" : "https://builds.apache.org/",
# "ubuntu" : "https://jenkins.qa.ubuntu.com/",
# "jenkins" : "https://ci.jenkins.io/",
}
base_path = {
"apache" : "A-D", # limit to just the 'A-D' view
# "ubuntu" : "wily", # limit to just the 'wily' view
# "jenkins" : "",
}
directories = { "views" : "./views",
"servers" : "./servers",
"users" : "./users"
}
#
# Apache failed to clean up a few projects when they migrated...
#
kill_files = ["A-D/Commons-Compress-Windows/33.xml",
"A-D/Commons-Compress-Windows/46.xml",
"A-D/Commons-Compress-Windows/57.xml",
"A-D/Commons-Compress-Windows/59.xml",
"A-D/Commons-Compress-Windows/64.xml",
"A-D/AntLib-props/22.xml",
"A-D/AntLib-props/23.xml",
"A-D/AntLib-props/24.xml",
"A-D/AntLib-props/25.xml",
"A-D/AntLib-svn/13.xml",
"A-D/AntLib-svn/14.xml",
"A-D/AntLib-svn/15.xml",
"A-D/AntLib-antunit/31.xml",
"A-D/AntLib-antunit/32.xml",
"A-D/AntLib-antunit/33.xml",
"A-D/AntLib-antunit/34.xml",
]
# helper functions
def updateCounts(counts, classType):
type = classType["_class"];
if type in counts:
counts[type] += 1
else:
counts[type] = 1
#
# URL's are returned from REST API without the trailing /api/xml.
#
def buildPath(url, resource_type="", resource=""):
return url + "/" + resource_type + "/" + resource + "/api/xml"
def createDirectoryStructure(project):
for subdir in directories:
dir = project + "/" + subdir
if not (os.path.exists(dir)):
os.makedirs(dir)
#
# Save the XML at the given path
#
def saveXML(url, path):
xml = getXML(url)
if path != "":
fp = open(path, "w")
fp.write(xml)
fp.close()
return xml
def getXML(url):
time.sleep(15) # apache doesn't like rapid requests anymore. :-(
page = urllib.urlopen(url)
xml = page.read()
return xml
#
# Standalone routine to determine what job types are used in the project
#
def allJobTypes(base_url):
url = buildPath(base_url)
page = urllib.urlopen(url)
fp = open("jobs.txt", "w")
jobTypeCounts = dict()
    xml = ET.fromstring(page.read())
for job in xml.findall("job"):
for elem in job:
if elem.tag == "name":
updateCounts(jobTypeCounts, job.attrib)
fp.write(job.tag + " : " + elem.text + "\n")
fp.close()
for key in jobTypeCounts:
print key, " : ", jobTypeCounts[key]
#
# Return a dictionary of job names and their corresponding URL's
#
def jobsFromView(url):
# url = buildPath(base_url, "view", view_name)
print "url = " + url
#
# Changing
#
try:
page = getXML(url)
tree = ET.fromstring(page)
    except Exception as e:
        print "Error fetching %s: %s" % (url, e)
        return {}
url_list = {}
for job in tree.findall("job"):
for key in job:
if key.tag == "name":
print "--%s--" % key.text
name = key.text
if key.tag == "url":
print "\turl: %s" % key.text
jobURL = key.text+"api/xml"
url_list[name] = jobURL
#
#
#
for key in url_list:
print "\t\t[" + key + "]\t" + url_list[key]
return url_list
def buildsFromJob(job): # input is the xml representing a job
tree = ET.fromstring(job)
url_list = {}
for build in tree.findall("build"):
for key in build:
if key.tag == "number":
build_number = key.text
if key.tag == "url":
build_url = key.text + "api/xml"
url_list[build_number] = build_url
return url_list
def allServers(base_url):
url = buildPath(base_url, "computer")
page = saveXML(url, "servers.xml")
tree = ET.fromstring(page)
server_list = {}
for server in tree.findall("computer"):
for child in server:
if child.tag == "displayName":
server_name = child.text
server_url = base_url + "/computer/" + child.text + "/api/xml"
server_list[server_name] = server_url
return server_list
#
# Debug routine: Print all tags from a ET tree
#
def printAll(tree, tag):
print "printAll()"
list = tree.findall(tag)
for item in list:
for key in item:
print "\t" + key.tag + "\t" + key.text
print "---- printAll()"
#
#
#
def allSubViews(view_tree):
#
# Use the presence of a "nestedView" tag to determine type of structure
# e.g. if nestedView is present view URLs represent directories, otherwise they represent the projects.
#
xml = getXML(view_tree)
tree = ET.fromstring(xml)
printAll(tree, "view")
viewList = []
subViews = tree.findall("view")
if not subViews: # implies that there are no subviews, just return the current url as an array with one entry.
print "No nested views, returning the path that was provied: " + view_tree
return viewList.append(view_tree)
else:
for view in subViews:
for key in view:
if key.tag == "url":
viewList.append(key.text)
return viewList
#
# Return all of the view URL's. Flattens nested views (is this desirable?)
#
def allViews(base_url):
url = buildPath(base_url)
xml = saveXML(url, "views.xml")
tree = ET.fromstring(xml)
view_list = {}
for view in tree.findall("view"):
for key in view:
if key.tag == "name":
view_name = key.text
if key.tag == "url":
view_URL = buildPath(key.text)
view_list[view_name] = view_URL
return view_list
#
# *** Main ***
#
projects = base_path.keys() # list of projects, need to parameterize
for key in projects:
print "Retrieving information for project : " + key
path = base_path[key]
createDirectoryStructure(path)
# server_list = allServers(base_urls[key])
# print "[Servers]"
# for server in server_list:
# print "\t" + server
# server_path = directories["servers"] + "/" + server + ".xml"
# saveXML(server_list[server], server_path)
view_urls = allViews("https://builds.apache.org/view/A-D/api/xml")
print "[views]"
# for url in view_urls:
# print "Getting views for : " + url
# view_list += allViews(url)
for view in view_urls:
print "\t" + view + "\t" + path + "\t" + view_urls[view]
jobList = jobsFromView(view_urls[view])
# saveXML(view_urls[view], path)
jobs = jobsFromView(view_urls[view])
if not os.path.exists(base_path[key]):
            os.mkdir(base_path[key])
for job in jobs:
print "%s : %s" % (job, jobs[job])
job_dir = base_path[key] + "/" + job
if not os.path.exists(job_dir):
os.mkdir(job_dir)
job_url = jobs[job]
file_name = "%s/%s.xml" % (job_dir, job)
print "Saving job xml as : %s" % file_name
page = saveXML(job_url, file_name)
builds = buildsFromJob(page)
for build_number in builds:
build_filename = job_dir + "/" + str(build_number) + ".xml"
if os.path.exists(build_filename):
print "\t...Skippiong : %s" % build_filename
continue
print "\tSaving build as %s" % build_filename
saveXML(builds[build_number], build_filename)
#
# Remove "old" files to clean up data...
#
for file in kill_files:
os.remove(file)
sys.exit()
| 27.243243 | 112 | 0.664187 |
793ef2719f4fca50e0c77d77bafec50bd3a091ba | 1,964 | py | Python | alipay/aop/api/response/AlipayOpenAgentOrderQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/AlipayOpenAgentOrderQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/AlipayOpenAgentOrderQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayOpenAgentOrderQueryResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenAgentOrderQueryResponse, self).__init__()
self._agent_app_id = None
self._confirm_url = None
self._merchant_pid = None
self._order_status = None
self._reject_reason = None
@property
def agent_app_id(self):
return self._agent_app_id
@agent_app_id.setter
def agent_app_id(self, value):
self._agent_app_id = value
@property
def confirm_url(self):
return self._confirm_url
@confirm_url.setter
def confirm_url(self, value):
self._confirm_url = value
@property
def merchant_pid(self):
return self._merchant_pid
@merchant_pid.setter
def merchant_pid(self, value):
self._merchant_pid = value
@property
def order_status(self):
return self._order_status
@order_status.setter
def order_status(self, value):
self._order_status = value
@property
def reject_reason(self):
return self._reject_reason
@reject_reason.setter
def reject_reason(self, value):
self._reject_reason = value
def parse_response_content(self, response_content):
response = super(AlipayOpenAgentOrderQueryResponse, self).parse_response_content(response_content)
if 'agent_app_id' in response:
self.agent_app_id = response['agent_app_id']
if 'confirm_url' in response:
self.confirm_url = response['confirm_url']
if 'merchant_pid' in response:
self.merchant_pid = response['merchant_pid']
if 'order_status' in response:
self.order_status = response['order_status']
if 'reject_reason' in response:
self.reject_reason = response['reject_reason']
| 29.757576 | 106 | 0.677189 |
793ef3b3e370252658a290d5db22ddb478f94767 | 5,574 | py | Python | localstack/utils/aws/templating.py | rubencosta/localstack | 369f65f1635c09934e23e3dbca54fbdb31cb7b74 | [
"Apache-2.0"
] | 31,928 | 2017-07-04T03:06:28.000Z | 2022-03-31T22:33:27.000Z | localstack/utils/aws/templating.py | rubencosta/localstack | 369f65f1635c09934e23e3dbca54fbdb31cb7b74 | [
"Apache-2.0"
] | 5,216 | 2017-07-04T11:45:41.000Z | 2022-03-31T22:02:14.000Z | localstack/utils/aws/templating.py | lambdafunc/localstack | 6285b43bec57435a2179310a8de2af8d8d8cf8dd | [
"Apache-2.0"
] | 3,056 | 2017-06-05T13:29:11.000Z | 2022-03-31T20:54:43.000Z | import base64
import json
import re
from six.moves.urllib.parse import quote_plus, unquote_plus
from localstack import config
from localstack.utils.common import (
extract_jsonpath,
is_number,
json_safe,
recurse_object,
short_uid,
to_number,
)
class VelocityInput(object):
"""Simple class to mimick the behavior of variable '$input' in AWS API Gateway integration velocity templates.
See: http://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-mapping-template-reference.html"""
def __init__(self, value):
self.value = value
def path(self, path):
if not self.value:
return {}
value = self.value if isinstance(self.value, dict) else json.loads(self.value)
return extract_jsonpath(value, path)
def json(self, path):
path = path or "$"
matching = self.path(path)
if isinstance(matching, (list, dict)):
matching = json_safe(matching)
return json.dumps(matching)
def __getattr__(self, name):
return self.value.get(name)
def __repr__(self):
return "$input"
class VelocityUtil(object):
"""Simple class to mimick the behavior of variable '$util' in AWS API Gateway integration velocity templates.
See: http://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-mapping-template-reference.html"""
def base64Encode(self, s):
if not isinstance(s, str):
s = json.dumps(s)
encoded_str = s.encode(config.DEFAULT_ENCODING)
encoded_b64_str = base64.b64encode(encoded_str)
return encoded_b64_str.decode(config.DEFAULT_ENCODING)
def base64Decode(self, s):
if not isinstance(s, str):
s = json.dumps(s)
return base64.b64decode(s)
def toJson(self, obj):
return obj and json.dumps(obj)
def urlEncode(self, s):
return quote_plus(s)
def urlDecode(self, s):
return unquote_plus(s)
def escapeJavaScript(self, s):
try:
return json.dumps(json.loads(s))
except Exception:
primitive_types = (str, int, bool, float, type(None))
s = s if isinstance(s, primitive_types) else str(s)
if str(s).strip() in ["true", "false"]:
s = bool(s)
elif s not in [True, False] and is_number(s):
s = to_number(s)
return json.dumps(s)
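# Worked examples of the helpers above (values can be checked by hand):
#   VelocityUtil().base64Encode("abc")            -> "YWJj"
#   VelocityUtil().escapeJavaScript('{"a": 1}')   -> '{"a": 1}'  (re-serialized JSON)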
def render_velocity_template(template, context, variables=None, as_json=False):
if variables is None:
variables = {}
import airspeed
if not template:
return template
# Apply a few fixes below, to properly prepare the template...
# TODO: remove once this PR is merged: https://github.com/purcell/airspeed/pull/48
def expr_parse(self):
try:
self.identity_match(self.DOT)
self.expression = self.next_element(airspeed.VariableExpression)
except airspeed.NoMatch:
self.expression = self.next_element(airspeed.ArrayIndex)
self.subexpression = None
try:
self.subexpression = self.next_element(airspeed.SubExpression)
except airspeed.NoMatch:
pass
airspeed.SubExpression.parse = expr_parse
# TODO: remove once this PR is merged: https://github.com/purcell/airspeed/pull/48
def expr_calculate(self, current_object, loader, global_namespace):
args = [current_object, loader]
if not isinstance(self.expression, airspeed.ArrayIndex):
return self.expression.calculate(*(args + [global_namespace]))
index = self.expression.calculate(*args)
result = current_object[index]
if self.subexpression:
result = self.subexpression.calculate(result, loader, global_namespace)
return result
airspeed.SubExpression.calculate = expr_calculate
# fix "#set" commands
template = re.sub(r"(^|\n)#\s+set(.*)", r"\1#set\2", template, re.MULTILINE)
# enable syntax like "test#${foo.bar}"
empty_placeholder = " __pLaCe-HoLdEr__ "
template = re.sub(
r"([^\s]+)#\$({)?(.*)",
r"\1#%s$\2\3" % empty_placeholder,
template,
        flags=re.MULTILINE,
)
# add extensions for common string functions below
class ExtendedString(str):
def trim(self, *args, **kwargs):
return ExtendedString(self.strip(*args, **kwargs))
def toLowerCase(self, *args, **kwargs):
return ExtendedString(self.lower(*args, **kwargs))
def toUpperCase(self, *args, **kwargs):
return ExtendedString(self.upper(*args, **kwargs))
def apply(obj, **kwargs):
if isinstance(obj, dict):
for k, v in obj.items():
if isinstance(v, str):
obj[k] = ExtendedString(v)
return obj
# loop through the variables and enable certain additional util functions (e.g., string utils)
variables = variables or {}
recurse_object(variables, apply)
# prepare and render template
context_var = variables.get("context") or {}
context_var.setdefault("requestId", short_uid())
t = airspeed.Template(template)
var_map = {
"input": VelocityInput(context),
"util": VelocityUtil(),
"context": context_var,
}
var_map.update(variables or {})
replaced = t.merge(var_map)
# revert temporary changes from the fixes above
replaced = replaced.replace(empty_placeholder, "")
if as_json:
replaced = json.loads(replaced)
return replaced
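# --- illustrative usage (not part of the original module) ---
# A minimal, hedged sketch of rendering an API Gateway-style mapping template
# with the helper above; the template string and payload are invented here.
if __name__ == "__main__":
    demo_template = "{\"name\": $input.json('$.name'), \"id\": \"$context.requestId\"}"
    print(render_velocity_template(demo_template, '{"name": "alice"}', as_json=True))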
| 32.034483 | 115 | 0.63563 |
793ef5546b7f80dc4c2e2b0007425f7aa6dfdb46 | 757 | py | Python | loggingdemo/example02.py | ErinKenna/loggingdemo | 7f2490b1e1062030785310d4d48eeddf5e19e07c | [
"Unlicense"
] | null | null | null | loggingdemo/example02.py | ErinKenna/loggingdemo | 7f2490b1e1062030785310d4d48eeddf5e19e07c | [
"Unlicense"
] | null | null | null | loggingdemo/example02.py | ErinKenna/loggingdemo | 7f2490b1e1062030785310d4d48eeddf5e19e07c | [
"Unlicense"
] | null | null | null | """ Refactor the divide method into a separate module.
"""
import itertools
import logging.config
import os
import pathlib
import yaml
from loggingdemo.utils import divide
app_dir = pathlib.Path(os.path.abspath(os.path.join(os.path.dirname(__file__), '../')))
log_config_fn = 'logging.yml'
__LOG_CONFIG_PATH = pathlib.Path(app_dir / log_config_fn)
with open(__LOG_CONFIG_PATH, 'rt') as f:
log_config = yaml.safe_load(f)
logging.config.dictConfig(log_config)
log = logging.getLogger(__name__)
numbers = [1, 2, 3.5, 0]
log.info('Divide all combinations (include zero in the mix)')
for n, d in itertools.combinations(numbers, 2):
divide_result = divide(n, d)
print(f'Dividing {n} by {d}\tGives {divide_result}')
log.info('Script complete')
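# For reference, a minimal logging.yml this script could load -- an invented
# example following the stdlib dictConfig schema, not necessarily the file
# shipped with the repo:
#
#   version: 1
#   handlers:
#     console:
#       class: logging.StreamHandler
#       level: INFO
#   root:
#     level: INFO
#     handlers: [console]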
| 25.233333 | 87 | 0.739762 |
793ef571bcdf31ca247d0f071447e377f2b8f1ff | 415 | py | Python | fairseq/data/optims_dataset.py | khyathiraghavi/roberta | 5d55faf79f6c0181dacfe5dd52e6165e8c9a0555 | [
"MIT"
] | null | null | null | fairseq/data/optims_dataset.py | khyathiraghavi/roberta | 5d55faf79f6c0181dacfe5dd52e6165e8c9a0555 | [
"MIT"
] | null | null | null | fairseq/data/optims_dataset.py | khyathiraghavi/roberta | 5d55faf79f6c0181dacfe5dd52e6165e8c9a0555 | [
"MIT"
] | null | null | null | import numpy as np
import torch
from . import BaseWrapperDataset
class OptimsDataset(BaseWrapperDataset):
def __init__(self, dataset):
super().__init__(dataset)
self.dataset = dataset
def __getitem__(self, index):
item = self.dataset[index]
return item
def __len__(self):
return len(self.dataset)
def collater(self, samples):
return samples
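# --- illustrative usage (not part of the original file) ---
# OptimsDataset is a pass-through wrapper; a hedged sketch with a plain list
# standing in for a real fairseq dataset:
#
#   ds = OptimsDataset([{"id": 0}, {"id": 1}])
#   batch = ds.collater([ds[0], ds[1]])   # collater returns samples unchanged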
| 16.6 | 40 | 0.655422 |
793ef6d82f9b9e1e76ea4f649407e6a16207846c | 205 | py | Python | leads/serializers.py | AlteredArt/jobjot | eabd8860e0bbfce8eae18b5914b66650b5b160eb | [
"MIT"
] | 1 | 2020-05-14T03:04:19.000Z | 2020-05-14T03:04:19.000Z | leads/serializers.py | AlteredArt/jobjot | eabd8860e0bbfce8eae18b5914b66650b5b160eb | [
"MIT"
] | null | null | null | leads/serializers.py | AlteredArt/jobjot | eabd8860e0bbfce8eae18b5914b66650b5b160eb | [
"MIT"
] | null | null | null | from rest_framework import serializers
from leads.models import Lead
# this is the Lead Serializer
class LeadSerializer(serializers.ModelSerializer):
class Meta:
model = Lead
fields = '__all__'
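# --- illustrative usage (not part of the original file) ---
# A hedged sketch, e.g. in a DRF view or a Django shell:
#   serializer = LeadSerializer(Lead.objects.first())
#   serializer.data   # -> dict with every field of the Lead model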
| 22.777778 | 50 | 0.77561 |
793ef827065c5230110dab9091816914dd233ccc | 11,681 | py | Python | SKTBert/data_loader.py | yeongjoon/NER | d2c93597726ed9507bfddea9197007d30aeaad8b | [
"Apache-2.0"
] | 1 | 2020-08-05T00:17:01.000Z | 2020-08-05T00:17:01.000Z | SKTBert/data_loader.py | yeongjoon/NER | d2c93597726ed9507bfddea9197007d30aeaad8b | [
"Apache-2.0"
] | null | null | null | SKTBert/data_loader.py | yeongjoon/NER | d2c93597726ed9507bfddea9197007d30aeaad8b | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Named entity recognition fine-tuning: utilities to work with CoNLL-2003 task. """
import logging
import os
import copy
import json
import torch
from torch.utils.data import TensorDataset
logger = logging.getLogger(__name__)
class InputExample(object):
"""A single training/test example for token classification."""
def __init__(self, guid, words, labels):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
words: list. The words of the sequence.
labels: (Optional) list. The labels for each word of the sequence. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.words = words
self.labels = labels
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self, input_ids, input_mask, segment_ids, label_ids):
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.label_ids = label_ids
def load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, mode):
    # Not sure whether this barrier should be applied only during training, or during evaluation as well.
if args.local_rank not in [-1, 0]:
torch.distributed.barrier() # Make sure only the first process in distributed training process the dataset, and the others will use the cache
# Load data features from cache or dataset file
cached_features_file = os.path.join(
args.data_dir,
"cached_{}_{}_{}".format(
mode, list(filter(None, args.model_name_or_path.split("/"))).pop(), str(args.max_seq_length)
),
)
if os.path.exists(cached_features_file) and not args.overwrite_cache:
logger.info("Loading features from cached file %s", cached_features_file)
features = torch.load(cached_features_file)
else:
logger.info("Creating features from dataset file at %s", args.data_dir)
examples = read_examples_from_file(args.data_dir, mode)
features = convert_examples_to_features(
examples,
labels,
args.max_seq_length,
tokenizer,
cls_token_at_end=bool(args.model_type in ["xlnet"]),
# xlnet has a cls token at the end
cls_token=tokenizer.cls_token,
cls_token_segment_id=2 if args.model_type in ["xlnet"] else 0,
sep_token=tokenizer.sep_token,
sep_token_extra=bool(args.model_type in ["roberta"]),
# roberta uses an extra separator b/w pairs of sentences, cf. github.com/pytorch/fairseq/commit/1684e166e3da03f5b600dbb7855cb98ddfcd0805
pad_on_left=bool(args.model_type in ["xlnet"]),
# pad on the left for xlnet
pad_token=tokenizer.pad_token_id,
pad_token_segment_id=tokenizer.pad_token_type_id,
pad_token_label_id=pad_token_label_id,
)
if args.local_rank in [-1, 0]:
logger.info("Saving features into cached file %s", cached_features_file)
torch.save(features, cached_features_file)
if args.local_rank == 0 and False:
torch.distributed.barrier() # Make sure only the first process in distributed training process the dataset, and the others will use the cache
# Convert to Tensors and build dataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long)
all_segment_ids = torch.tensor([f.segment_ids for f in features], dtype=torch.long)
all_label_ids = torch.tensor([f.label_ids for f in features], dtype=torch.long)
dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label_ids)
return dataset
def read_examples_from_file(data_dir, mode):
file_path = os.path.join(data_dir, "{}.txt".format(mode))
guid_index = 1
examples = []
with open(file_path, encoding="utf-8") as f:
words = []
labels = []
for line in f:
if line.startswith("-DOCSTART-") or line == "" or line == "\n":
if words:
examples.append(InputExample(guid="{}-{}".format(mode, guid_index), words=words, labels=labels))
guid_index += 1
words = []
labels = []
else:
splits = line.split(" ")
words.append(splits[0])
if len(splits) > 1:
labels.append(splits[-1].replace("\n", ""))
else:
# Examples could have no label for mode = "test"
labels.append("O")
if words:
examples.append(InputExample(guid="{}-{}".format(mode, guid_index), words=words, labels=labels))
return examples
def convert_examples_to_features(
examples,
label_list,
max_seq_length,
tokenizer,
cls_token_at_end=False,
cls_token="[CLS]",
cls_token_segment_id=1,
sep_token="[SEP]",
sep_token_extra=False,
pad_on_left=False,
pad_token=0,
pad_token_segment_id=0,
pad_token_label_id=-100,
sequence_a_segment_id=0,
mask_padding_with_zero=True,
):
""" Loads a data file into a list of `InputBatch`s
`cls_token_at_end` define the location of the CLS token:
- False (Default, BERT/XLM pattern): [CLS] + A + [SEP] + B + [SEP]
- True (XLNet/GPT pattern): A + [SEP] + B + [SEP] + [CLS]
`cls_token_segment_id` define the segment id associated to the CLS token (0 for BERT, 2 for XLNet)
"""
label_map = {label: i for i, label in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
if ex_index % 10000 == 0:
logger.info("Writing example %d of %d", ex_index, len(examples))
tokens = []
label_ids = []
for word, label in zip(example.words, example.labels):
word_tokens = tokenizer.tokenize(word)
            # bert-base-multilingual-cased sometimes outputs nothing ([]) when calling tokenize with just a space.
if len(word_tokens) > 0:
tokens.extend(word_tokens)
# Use the real label id for the first token of the word, and padding ids for the remaining tokens
label_ids.extend([label_map[label]] + [pad_token_label_id] * (len(word_tokens) - 1))
# Account for [CLS] and [SEP] with "- 2" and with "- 3" for RoBERTa.
special_tokens_count = tokenizer.num_added_tokens()
if len(tokens) > max_seq_length - special_tokens_count:
tokens = tokens[: (max_seq_length - special_tokens_count)]
label_ids = label_ids[: (max_seq_length - special_tokens_count)]
# The convention in BERT is:
# (a) For sequence pairs:
# tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
# type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1
# (b) For single sequences:
# tokens: [CLS] the dog is hairy . [SEP]
# type_ids: 0 0 0 0 0 0 0
#
# Where "type_ids" are used to indicate whether this is the first
# sequence or the second sequence. The embedding vectors for `type=0` and
# `type=1` were learned during pre-training and are added to the wordpiece
# embedding vector (and position vector). This is not *strictly* necessary
# since the [SEP] token unambiguously separates the sequences, but it makes
# it easier for the model to learn the concept of sequences.
#
# For classification tasks, the first vector (corresponding to [CLS]) is
# used as as the "sentence vector". Note that this only makes sense because
# the entire model is fine-tuned.
tokens += [sep_token]
label_ids += [pad_token_label_id]
if sep_token_extra:
# roberta uses an extra separator b/w pairs of sentences
tokens += [sep_token]
label_ids += [pad_token_label_id]
segment_ids = [sequence_a_segment_id] * len(tokens)
if cls_token_at_end:
tokens += [cls_token]
label_ids += [pad_token_label_id]
segment_ids += [cls_token_segment_id]
else:
tokens = [cls_token] + tokens
label_ids = [pad_token_label_id] + label_ids
segment_ids = [cls_token_segment_id] + segment_ids
input_ids = tokenizer.convert_tokens_to_ids(tokens)
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
input_mask = [1 if mask_padding_with_zero else 0] * len(input_ids)
# Zero-pad up to the sequence length.
padding_length = max_seq_length - len(input_ids)
if pad_on_left:
input_ids = ([pad_token] * padding_length) + input_ids
input_mask = ([0 if mask_padding_with_zero else 1] * padding_length) + input_mask
segment_ids = ([pad_token_segment_id] * padding_length) + segment_ids
label_ids = ([pad_token_label_id] * padding_length) + label_ids
else:
input_ids += [pad_token] * padding_length
input_mask += [0 if mask_padding_with_zero else 1] * padding_length
segment_ids += [pad_token_segment_id] * padding_length
label_ids += [pad_token_label_id] * padding_length
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
assert len(label_ids) == max_seq_length
if ex_index < 5:
logger.info("*** Example ***")
logger.info("guid: %s", example.guid)
logger.info("tokens: %s", " ".join([str(x) for x in tokens]))
logger.info("input_ids: %s", " ".join([str(x) for x in input_ids]))
logger.info("input_mask: %s", " ".join([str(x) for x in input_mask]))
logger.info("segment_ids: %s", " ".join([str(x) for x in segment_ids]))
logger.info("label_ids: %s", " ".join([str(x) for x in label_ids]))
features.append(
InputFeatures(input_ids=input_ids, input_mask=input_mask, segment_ids=segment_ids, label_ids=label_ids)
)
return features
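# --- illustrative usage (not part of the original file) ---
# A hedged sketch of wiring the helpers above together outside of
# load_and_cache_examples; the tokenizer checkpoint is an assumption.
#
#   from transformers import BertTokenizer
#   tokenizer = BertTokenizer.from_pretrained("bert-base-multilingual-cased")
#   examples = read_examples_from_file("data", "train")
#   features = convert_examples_to_features(
#       examples, get_labels(None), 128, tokenizer,
#       cls_token=tokenizer.cls_token, sep_token=tokenizer.sep_token,
#       pad_token=tokenizer.pad_token_id, pad_token_label_id=-100)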
def get_labels(path):
if path:
with open(path, "r") as f:
labels = f.read().splitlines()
if "O" not in labels:
labels = ["O"] + labels
return labels
else:
return ["O", "B-MISC", "I-MISC", "B-PER", "I-PER", "B-ORG", "I-ORG", "B-LOC", "I-LOC"] | 43.585821 | 151 | 0.615358 |
793ef95a095fc2b205dde0ff5a6bf126f082d04d | 205,495 | py | Python | packages/qgs_project_template.py | jphuart/swatplus-automatic-workflow | dd2eeb7f882eb2d4ab7e1e5265c10b9beb93ddc4 | [
"MIT"
] | 8 | 2020-06-28T07:50:29.000Z | 2022-01-05T16:29:48.000Z | packages/qgs_project_template.py | jphuart/swatplus-automatic-workflow | dd2eeb7f882eb2d4ab7e1e5265c10b9beb93ddc4 | [
"MIT"
] | null | null | null | packages/qgs_project_template.py | jphuart/swatplus-automatic-workflow | dd2eeb7f882eb2d4ab7e1e5265c10b9beb93ddc4 | [
"MIT"
] | 5 | 2020-06-28T07:50:31.000Z | 2021-08-16T07:09:59.000Z |
'''
date : 31/03/2020
description : a string template for the QGIS project (.qgs) file
author : Celray James CHAWANDA
contact : celray.chawanda@outlook.com
licence : MIT 2020
'''
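# A minimal, hypothetical rendering helper (not part of the original module).
# It assumes every brace pair in `template` below is a str.format placeholder
# such as {project_name}, {dem_name} or {srid}, which holds for the visible
# portion of the template.
def render_qgs_project(template_text, **params):
    """Fill the {placeholder} fields of the .qgs project template."""
    return template_text.format(**params)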
template = '''<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
<qgis projectname="{project_name}" version="3.10.10-A Coruña">
<homePath path=""/>
<title>{project_name}</title>
<autotransaction active="0"/>
<evaluateDefaultValues active="0"/>
<trust active="0"/>
<projectCrs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</projectCrs>
<layer-tree-group>
<customproperties/>
<layer-tree-group checked="Qt::Checked" expanded="1" name="Animations">
<customproperties/>
</layer-tree-group>
<layer-tree-group checked="Qt::Checked" expanded="1" name="Results">
<customproperties/>
</layer-tree-group>
<layer-tree-group checked="Qt::Checked" expanded="1" name="Watershed">
<customproperties/>
<layer-tree-layer id="Subbasins__subs1__3017a81e_0174_439c_b815_cf54de0e0667" source="./Watershed/Shapes/subs1.shp" checked="Qt::Checked" expanded="0" providerKey="ogr" name="Subbasins (subs1)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Pt_sources_and_reservoirs__reservoirs2__ada5d781_850f_43ac_825b_b807e28299e4" source="./Watershed/Shapes/reservoirs1.shp" checked="Qt::Checked" expanded="1" providerKey="ogr" name="Pt sources and reservoirs (reservoirs2)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Snapped_inlets_outlets__{outlet_name}_snap__2a54eb19_3da0_420d_b964_e4cd8efd371f" source="./Watershed/Shapes/{outlet_name}_snap.shp" checked="Qt::Checked" expanded="1" providerKey="ogr" name="Snapped inlets/outlets ({outlet_name}_snap)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Drawn_inlets_outlets__{outlet_name}__c41cb90c_f1d6_4ffe_8a64_99bcb575d961" source="./Watershed/Shapes/{outlet_name}.shp" checked="Qt::Unchecked" expanded="1" providerKey="ogr" name="Drawn inlets/outlets ({outlet_name})">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Inlets_outlets__{outlet_name}__0c49465a_2a2b_4ecb_ae4f_fbb60c4c1bcb" source="./Watershed/Shapes/{outlet_name}.shp" checked="Qt::Checked" expanded="1" providerKey="ogr" name="Inlets/outlets ({outlet_name})">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Streams__{dem_name}stream__6a837462_9d7d_48f0_a6c1_1710f553d03b" source="./Watershed/Shapes/{dem_name}stream.shp" checked="Qt::Checked" expanded="1" providerKey="ogr" name="Streams ({dem_name}stream)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Channel_reaches__rivs1__514d2d76_3dcd_4834_8bd4_42392284ab2f" source="./Watershed/Shapes/rivs1.shp" checked="Qt::Checked" expanded="1" providerKey="ogr" name="Channel reaches (rivs1)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Channels__{dem_name}channel__a7e3608c_b71d_44f6_8194_67e56bb7c543" source="./Watershed/Shapes/{dem_name}channel.shp" checked="Qt::Unchecked" expanded="1" providerKey="ogr" name="Channels ({dem_name}channel)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Full_LSUs__lsus1__8f4e9cfb_3ca6_4a70_83b9_fe977379bcf4" source="./Watershed/Shapes/lsus1.shp" checked="Qt::Checked" expanded="1" providerKey="ogr" name="Full LSUs (lsus1)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Actual_HRUs__hrus2__7adc36e1_3c7f_40db_8b2c_bb4f79fa3338" source="./Watershed/Shapes/hrus2.shp" checked="Qt::Checked" expanded="1" providerKey="ogr" name="Actual HRUs (hrus2)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Full_HRUs__hrus1__4e2ba365_e7bd_4f8e_9d6c_79056945afb5" source="./Watershed/Shapes/hrus1.shp" checked="Qt::Unchecked" expanded="1" providerKey="ogr" name="Full HRUs (hrus1)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="Hillshade__{dem_name}hillshade__a6f33483_65e8_4cde_a966_948ff13f0c2a" source="./Watershed/Rasters/DEM/{dem_name}hillshade.tif" checked="Qt::Checked" expanded="0" providerKey="gdal" name="Hillshade ({dem_name}hillshade)">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer id="DEM__{dem_name}__f751ab49_fdac_4766_be7f_300fbfe6adf2" source="./Watershed/Rasters/DEM/{dem_file_name}" checked="Qt::Checked" expanded="1" providerKey="gdal" name="DEM ({dem_name})">
<customproperties/>
</layer-tree-layer>
</layer-tree-group>
<layer-tree-group checked="Qt::Checked" expanded="1" name="Landuse">
<customproperties/>
<layer-tree-layer id="Landuses__{landuse_name}__f7ec5ca9_3dce_4d3e_8def_9e31ecc6c163" source="./Watershed/Rasters/Landuse/{landuse_file_name}" checked="Qt::Checked" expanded="1" providerKey="gdal" name="Landuses ({landuse_name})">
<customproperties/>
</layer-tree-layer>
</layer-tree-group>
<layer-tree-group checked="Qt::Checked" expanded="1" name="Soil">
<customproperties/>
<layer-tree-layer id="Soils__{soil_name}_tif__2cd25288_d1b5_4e76_83af_39034c9f7ffd" source="./Watershed/Rasters/Soil/{soil_file_name}" checked="Qt::Checked" expanded="1" providerKey="gdal" name="Soils ({soil_name})">
<customproperties/>
</layer-tree-layer>
</layer-tree-group>
<layer-tree-group checked="Qt::Checked" expanded="1" name="Slope">
<customproperties/>
<layer-tree-layer id="Slope_bands__{dem_name}slp_bands__daa1ee9a_d352_4de4_a12e_21aa0143f677" source="./Watershed/Rasters/DEM/{dem_name}slp_bands.tif" checked="Qt::Checked" expanded="1" providerKey="gdal" name="Slope bands ({dem_name}slp_bands)">
<customproperties/>
</layer-tree-layer>
</layer-tree-group>
<custom-order enabled="0">
<item>DEM__{dem_name}__f751ab49_fdac_4766_be7f_300fbfe6adf2</item>
<item>Hillshade__{dem_name}hillshade__a6f33483_65e8_4cde_a966_948ff13f0c2a</item>
<item>Inlets_outlets__{outlet_name}__0c49465a_2a2b_4ecb_ae4f_fbb60c4c1bcb</item>
<item>Drawn_inlets_outlets__{outlet_name}__c41cb90c_f1d6_4ffe_8a64_99bcb575d961</item>
<item>Landuses__{landuse_name}__f7ec5ca9_3dce_4d3e_8def_9e31ecc6c163</item>
<item>Soils__{soil_name}_tif__2cd25288_d1b5_4e76_83af_39034c9f7ffd</item>
<item>Snapped_inlets_outlets__{outlet_name}_snap__2a54eb19_3da0_420d_b964_e4cd8efd371f</item>
<item>Streams__{dem_name}stream__6a837462_9d7d_48f0_a6c1_1710f553d03b</item>
<item>Channels__{dem_name}channel__a7e3608c_b71d_44f6_8194_67e56bb7c543</item>
<item>Full_LSUs__lsus1__8f4e9cfb_3ca6_4a70_83b9_fe977379bcf4</item>
<item>Full_HRUs__hrus1__4e2ba365_e7bd_4f8e_9d6c_79056945afb5</item>
<item>Slope_bands__{dem_name}slp_bands__daa1ee9a_d352_4de4_a12e_21aa0143f677</item>
<item>Pt_sources_and_reservoirs__reservoirs2__ada5d781_850f_43ac_825b_b807e28299e4</item>
<item>Channel_reaches__rivs1__514d2d76_3dcd_4834_8bd4_42392284ab2f</item>
<item>Actual_HRUs__hrus2__7adc36e1_3c7f_40db_8b2c_bb4f79fa3338</item>
<item>Subbasins__subs1__3017a81e_0174_439c_b815_cf54de0e0667</item>
</custom-order>
</layer-tree-group>
<snapping-settings enabled="0" intersection-snapping="0" unit="1" mode="2" tolerance="12" type="1">
<individual-layer-settings>
<layer-setting id="Snapped_inlets_outlets__{outlet_name}_snap__2a54eb19_3da0_420d_b964_e4cd8efd371f" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Full_LSUs__lsus1__8f4e9cfb_3ca6_4a70_83b9_fe977379bcf4" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Drawn_inlets_outlets__{outlet_name}__c41cb90c_f1d6_4ffe_8a64_99bcb575d961" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Actual_HRUs__hrus2__7adc36e1_3c7f_40db_8b2c_bb4f79fa3338" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Subbasins__subs1__3017a81e_0174_439c_b815_cf54de0e0667" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Streams__{dem_name}stream__6a837462_9d7d_48f0_a6c1_1710f553d03b" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Channels__{dem_name}channel__a7e3608c_b71d_44f6_8194_67e56bb7c543" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Full_HRUs__hrus1__4e2ba365_e7bd_4f8e_9d6c_79056945afb5" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Pt_sources_and_reservoirs__reservoirs2__ada5d781_850f_43ac_825b_b807e28299e4" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Inlets_outlets__{outlet_name}__0c49465a_2a2b_4ecb_ae4f_fbb60c4c1bcb" enabled="0" tolerance="12" type="1" units="1"/>
<layer-setting id="Channel_reaches__rivs1__514d2d76_3dcd_4834_8bd4_42392284ab2f" enabled="0" tolerance="12" type="1" units="1"/>
</individual-layer-settings>
</snapping-settings>
<relations/>
<mapcanvas name="theMapCanvas" annotationsVisible="1">
<units>meters</units>
<extent>
<xmin>325042.12329483608482406</xmin>
<ymin>1286510.87989890901371837</ymin>
<xmax>338444.10459518124116585</xmax>
<ymax>1293740.12989890901371837</ymax>
</extent>
<rotation>0</rotation>
<destinationsrs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</destinationsrs>
<rendermaptile>0</rendermaptile>
<expressionContextScope/>
</mapcanvas>
<projectModels/>
<legend updateDrawingOrder="true">
<legendgroup checked="Qt::Checked" open="true" name="Animations"/>
<legendgroup checked="Qt::Checked" open="true" name="Results"/>
<legendgroup checked="Qt::Checked" open="true" name="Watershed">
<legendlayer checked="Qt::Checked" open="false" showFeatureCount="0" drawingOrder="-1" name="Subbasins (subs1)">
<filegroup open="false" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Subbasins__subs1__3017a81e_0174_439c_b815_cf54de0e0667"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Pt sources and reservoirs (reservoirs2)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Pt_sources_and_reservoirs__reservoirs2__ada5d781_850f_43ac_825b_b807e28299e4"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Snapped inlets/outlets ({outlet_name}_snap)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Snapped_inlets_outlets__{outlet_name}_snap__2a54eb19_3da0_420d_b964_e4cd8efd371f"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Unchecked" open="true" showFeatureCount="0" drawingOrder="-1" name="Drawn inlets/outlets ({outlet_name})">
<filegroup open="true" hidden="false">
<legendlayerfile visible="0" isInOverview="0" layerid="Drawn_inlets_outlets__{outlet_name}__c41cb90c_f1d6_4ffe_8a64_99bcb575d961"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Inlets/outlets ({outlet_name})">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Inlets_outlets__{outlet_name}__0c49465a_2a2b_4ecb_ae4f_fbb60c4c1bcb"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Streams ({dem_name}stream)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Streams__{dem_name}stream__6a837462_9d7d_48f0_a6c1_1710f553d03b"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Channel reaches (rivs1)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Channel_reaches__rivs1__514d2d76_3dcd_4834_8bd4_42392284ab2f"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Unchecked" open="true" showFeatureCount="0" drawingOrder="-1" name="Channels ({dem_name}channel)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="0" isInOverview="0" layerid="Channels__{dem_name}channel__a7e3608c_b71d_44f6_8194_67e56bb7c543"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Full LSUs (lsus1)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Full_LSUs__lsus1__8f4e9cfb_3ca6_4a70_83b9_fe977379bcf4"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Actual HRUs (hrus2)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Actual_HRUs__hrus2__7adc36e1_3c7f_40db_8b2c_bb4f79fa3338"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Unchecked" open="true" showFeatureCount="0" drawingOrder="-1" name="Full HRUs (hrus1)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="0" isInOverview="0" layerid="Full_HRUs__hrus1__4e2ba365_e7bd_4f8e_9d6c_79056945afb5"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="false" showFeatureCount="0" drawingOrder="-1" name="Hillshade ({dem_name}hillshade)">
<filegroup open="false" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Hillshade__{dem_name}hillshade__a6f33483_65e8_4cde_a966_948ff13f0c2a"/>
</filegroup>
</legendlayer>
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="DEM ({dem_name})">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="DEM__{dem_name}__f751ab49_fdac_4766_be7f_300fbfe6adf2"/>
</filegroup>
</legendlayer>
</legendgroup>
<legendgroup checked="Qt::Checked" open="true" name="Landuse">
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Landuses ({landuse_name})">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Landuses__{landuse_name}__f7ec5ca9_3dce_4d3e_8def_9e31ecc6c163"/>
</filegroup>
</legendlayer>
</legendgroup>
<legendgroup checked="Qt::Checked" open="true" name="Soil">
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Soils ({soil_name})">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Soils__{soil_name}_tif__2cd25288_d1b5_4e76_83af_39034c9f7ffd"/>
</filegroup>
</legendlayer>
</legendgroup>
<legendgroup checked="Qt::Checked" open="true" name="Slope">
<legendlayer checked="Qt::Checked" open="true" showFeatureCount="0" drawingOrder="-1" name="Slope bands ({dem_name}slp_bands)">
<filegroup open="true" hidden="false">
<legendlayerfile visible="1" isInOverview="0" layerid="Slope_bands__{dem_name}slp_bands__daa1ee9a_d352_4de4_a12e_21aa0143f677"/>
</filegroup>
</legendlayer>
</legendgroup>
</legend>
<mapViewDocks/>
<mapViewDocks3D/>
<projectlayers>
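    <!-- Each maplayer element below defines one project layer. Curly-brace
         tokens such as {dem_name}, {proj4} and {srid} look like template
         placeholders to be substituted with project-specific values when the
         project file is generated; they are not literal QGIS settings. -->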
<maplayer geometry="Polygon" maxScale="-4.65661e-10" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="1" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="1" simplifyLocal="1" wkbType="MultiPolygon" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328825.8826469536870718</xmin>
<ymin>1287329.26022111857309937</ymin>
<xmax>336355.8826469536870718</xmax>
<ymax>1292189.26022111857309937</ymax>
</extent>
<id>Actual_HRUs__hrus2__7adc36e1_3c7f_40db_8b2c_bb4f79fa3338</id>
<datasource>./Watershed/Shapes/hrus2.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Actual HRUs (hrus2)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="singleSymbol">
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="fill" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleFill">
<prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="color" v="220,255,212,255"/>
<prop k="joinstyle" v="bevel"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.06"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="style" v="solid"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
<rotation/>
<sizescale/>
</renderer-v2>
<customproperties/>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="Subbasin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Channel">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Landscape">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Landuse">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Soil">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="SlopeBand">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Area">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="%Subbasin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="%Landscape">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="HRUS">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="LINKNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="Subbasin" index="0" name=""/>
<alias field="Channel" index="1" name=""/>
<alias field="Landscape" index="2" name=""/>
<alias field="Landuse" index="3" name=""/>
<alias field="Soil" index="4" name=""/>
<alias field="SlopeBand" index="5" name=""/>
<alias field="Area" index="6" name=""/>
<alias field="%Subbasin" index="7" name=""/>
<alias field="%Landscape" index="8" name=""/>
<alias field="HRUS" index="9" name=""/>
<alias field="LINKNO" index="10" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="Subbasin" applyOnUpdate="0" expression=""/>
<default field="Channel" applyOnUpdate="0" expression=""/>
<default field="Landscape" applyOnUpdate="0" expression=""/>
<default field="Landuse" applyOnUpdate="0" expression=""/>
<default field="Soil" applyOnUpdate="0" expression=""/>
<default field="SlopeBand" applyOnUpdate="0" expression=""/>
<default field="Area" applyOnUpdate="0" expression=""/>
<default field="%Subbasin" applyOnUpdate="0" expression=""/>
<default field="%Landscape" applyOnUpdate="0" expression=""/>
<default field="HRUS" applyOnUpdate="0" expression=""/>
<default field="LINKNO" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Channel" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Landscape" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Landuse" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Soil" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="SlopeBand" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Area" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="%Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="%Landscape" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="HRUS" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="LINKNO" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="Subbasin" exp="" desc=""/>
<constraint field="Channel" exp="" desc=""/>
<constraint field="Landscape" exp="" desc=""/>
<constraint field="Landuse" exp="" desc=""/>
<constraint field="Soil" exp="" desc=""/>
<constraint field="SlopeBand" exp="" desc=""/>
<constraint field="Area" exp="" desc=""/>
<constraint field="%Subbasin" exp="" desc=""/>
<constraint field="%Landscape" exp="" desc=""/>
<constraint field="HRUS" exp="" desc=""/>
<constraint field="LINKNO" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions>
        <defaultAction value="{00000000-0000-0000-0000-000000000000}" key="Canvas"/>
</attributeactions>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns/>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">../../../Documents</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable/>
<labelOnTop/>
<widgets/>
<previewExpression>"SUBBASIN"</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer geometry="Line" maxScale="0" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="0" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="0" simplifyLocal="1" wkbType="MultiLineString" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328870.8826469536870718</xmin>
<ymin>1287794.26022111857309937</ymin>
<xmax>335980.8826469536870718</xmax>
<ymax>1291904.26022111857309937</ymax>
</extent>
<id>Channel_reaches__rivs1__514d2d76_3dcd_4834_8bd4_42392284ab2f</id>
<datasource>./Watershed/Shapes/rivs1.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Channel reaches (rivs1)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="RuleRenderer">
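      <!-- Rule-based rendering for channel reaches: a reach draws as a plain
           channel, a reservoir or a pond according to its "Reservoir" and
           "Pond" attribute values; symbols 0-2 below provide the line styles. -->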
      <rules key="{50ab9495-26fd-4273-9c00-a04b2e8a8b6c}">
        <rule key="{d08297ae-7ac2-40dd-b266-b666c1d99d5c}" label="Channel" symbol="0" filter=" &quot;Reservoir&quot; = 0 AND &quot;Pond&quot; = 0"/>
        <rule key="{49129db6-8dbb-4e40-ad9d-1d806820c5f4}" label="Reservoir" symbol="1" filter=" &quot;Reservoir&quot; &gt; 0 AND &quot;Pond&quot; = 0"/>
        <rule key="{9ea290bc-2764-4fbe-bbdc-0d991d819325}" label="Pond" description="Pond" symbol="2" filter=" &quot;Reservoir&quot; = 0 AND &quot;Pond&quot; &gt; 0"/>
</rules>
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="line" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleLine">
<prop k="capstyle" v="round"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="round"/>
<prop k="line_color" v="27,179,255,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.26"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="ring_filter" v="0"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="line" name="1">
<layer pass="0" locked="0" enabled="1" class="SimpleLine">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="0,85,255,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="2"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="ring_filter" v="0"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="line" name="2">
<layer pass="0" locked="0" enabled="1" class="SimpleLine">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="21,217,234,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="1"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="ring_filter" v="0"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
</renderer-v2>
<customproperties>
<property value="COALESCE("ID", '<NULL>')" key="dualview/previewExpressions"/>
<property value="0" key="embeddedWidgets/count"/>
<property key="variableNames"/>
<property key="variableValues"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<SingleCategoryDiagramRenderer attributeLegend="1" diagramType="Histogram">
<DiagramCategory scaleBasedVisibility="0" lineSizeType="MM" sizeScale="3x:0,0,0,0,0,0" rotationOffset="270" minimumSize="0" diagramOrientation="Up" barWidth="5" maxScaleDenominator="1e+08" opacity="1" labelPlacementMethod="XHeight" backgroundAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" backgroundColor="#ffffff" sizeType="MM" penColor="#000000" height="15" enabled="0" penWidth="0" penAlpha="255" minScaleDenominator="0" scaleDependency="Area" width="15">
<fontProperties style="" description="MS Shell Dlg 2,8.25,-1,5,50,0,0,0,0,0"/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings dist="0" linePlacementFlags="2" zIndex="0" showAll="1" placement="2" obstacle="0" priority="0">
<properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</properties>
</DiagramLayerSettings>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="LINKNO">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Channel">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="ChannelR">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Subbasin">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="AreaC">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Len2">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Slo2">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Wid2">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Dep2">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="MinEl">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="MaxEl">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Reservoir">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Pond">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="LakeIn">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="LakeOut">
<editWidget type="Range">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="LINKNO" index="0" name=""/>
<alias field="Channel" index="1" name=""/>
<alias field="ChannelR" index="2" name=""/>
<alias field="Subbasin" index="3" name=""/>
<alias field="AreaC" index="4" name=""/>
<alias field="Len2" index="5" name=""/>
<alias field="Slo2" index="6" name=""/>
<alias field="Wid2" index="7" name=""/>
<alias field="Dep2" index="8" name=""/>
<alias field="MinEl" index="9" name=""/>
<alias field="MaxEl" index="10" name=""/>
<alias field="Reservoir" index="11" name=""/>
<alias field="Pond" index="12" name=""/>
<alias field="LakeIn" index="13" name=""/>
<alias field="LakeOut" index="14" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="LINKNO" applyOnUpdate="0" expression=""/>
<default field="Channel" applyOnUpdate="0" expression=""/>
<default field="ChannelR" applyOnUpdate="0" expression=""/>
<default field="Subbasin" applyOnUpdate="0" expression=""/>
<default field="AreaC" applyOnUpdate="0" expression=""/>
<default field="Len2" applyOnUpdate="0" expression=""/>
<default field="Slo2" applyOnUpdate="0" expression=""/>
<default field="Wid2" applyOnUpdate="0" expression=""/>
<default field="Dep2" applyOnUpdate="0" expression=""/>
<default field="MinEl" applyOnUpdate="0" expression=""/>
<default field="MaxEl" applyOnUpdate="0" expression=""/>
<default field="Reservoir" applyOnUpdate="0" expression=""/>
<default field="Pond" applyOnUpdate="0" expression=""/>
<default field="LakeIn" applyOnUpdate="0" expression=""/>
<default field="LakeOut" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="LINKNO" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Channel" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="ChannelR" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="AreaC" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Len2" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Slo2" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Wid2" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Dep2" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="MinEl" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="MaxEl" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Reservoir" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Pond" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="LakeIn" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="LakeOut" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="LINKNO" exp="" desc=""/>
<constraint field="Channel" exp="" desc=""/>
<constraint field="ChannelR" exp="" desc=""/>
<constraint field="Subbasin" exp="" desc=""/>
<constraint field="AreaC" exp="" desc=""/>
<constraint field="Len2" exp="" desc=""/>
<constraint field="Slo2" exp="" desc=""/>
<constraint field="Wid2" exp="" desc=""/>
<constraint field="Dep2" exp="" desc=""/>
<constraint field="MinEl" exp="" desc=""/>
<constraint field="MaxEl" exp="" desc=""/>
<constraint field="Reservoir" exp="" desc=""/>
<constraint field="Pond" exp="" desc=""/>
<constraint field="LakeIn" exp="" desc=""/>
<constraint field="LakeOut" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions>
        <defaultAction value="{00000000-0000-0000-0000-000000000000}" key="Canvas"/>
</attributeactions>
<attributetableconfig sortExpression=""Pond"" sortOrder="1" actionWidgetStyle="dropDown">
<columns>
<column width="-1" hidden="0" type="field" name="Channel"/>
<column width="-1" hidden="0" type="field" name="ChannelR"/>
<column width="-1" hidden="0" type="field" name="Subbasin"/>
<column width="-1" hidden="0" type="field" name="AreaC"/>
<column width="-1" hidden="0" type="field" name="Len2"/>
<column width="-1" hidden="0" type="field" name="Slo2"/>
<column width="-1" hidden="0" type="field" name="Wid2"/>
<column width="-1" hidden="0" type="field" name="Dep2"/>
<column width="-1" hidden="0" type="field" name="MinEl"/>
<column width="-1" hidden="0" type="field" name="MaxEl"/>
<column width="-1" hidden="0" type="field" name="Reservoir"/>
<column width="-1" hidden="1" type="actions"/>
<column width="-1" hidden="0" type="field" name="LINKNO"/>
<column width="-1" hidden="0" type="field" name="Pond"/>
<column width="-1" hidden="0" type="field" name="LakeIn"/>
<column width="-1" hidden="0" type="field" name="LakeOut"/>
</columns>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">../../../../../PROGRA~1/QGIS3~1.4/bin</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath>../../../../../PROGRA~1/QGIS3~1.4/bin</editforminitfilepath>
<editforminitcode><![CDATA[# -*- coding: utf-8 -*-
"""
QGIS forms can have a Python function that is called when the form is
opened.
Use this function to add extra logic to your forms.
Enter the name of the function in the "Python Init function"
field.
An example follows:
"""
from qgis.PyQt.QtWidgets import QWidget
def my_form_open(dialog, layer, feature):
    # Read the feature's geometry and look up a widget on the form by its
    # object name; "MyLineEdit" is just a placeholder from the stock example.
    geom = feature.geometry()
    control = dialog.findChild(QWidget, "MyLineEdit")
]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable>
<field editable="1" name="AreaC"/>
<field editable="1" name="Channel"/>
<field editable="1" name="ChannelR"/>
<field editable="1" name="Dep2"/>
<field editable="1" name="LINKNO"/>
<field editable="1" name="LakeIn"/>
<field editable="1" name="LakeOut"/>
<field editable="1" name="Len2"/>
<field editable="1" name="MaxEl"/>
<field editable="1" name="MinEl"/>
<field editable="1" name="Pond"/>
<field editable="1" name="Reservoir"/>
<field editable="1" name="Slo2"/>
<field editable="1" name="Subbasin"/>
<field editable="1" name="Wid2"/>
</editable>
<labelOnTop>
<field labelOnTop="0" name="AreaC"/>
<field labelOnTop="0" name="Channel"/>
<field labelOnTop="0" name="ChannelR"/>
<field labelOnTop="0" name="Dep2"/>
<field labelOnTop="0" name="LINKNO"/>
<field labelOnTop="0" name="LakeIn"/>
<field labelOnTop="0" name="LakeOut"/>
<field labelOnTop="0" name="Len2"/>
<field labelOnTop="0" name="MaxEl"/>
<field labelOnTop="0" name="MinEl"/>
<field labelOnTop="0" name="Pond"/>
<field labelOnTop="0" name="Reservoir"/>
<field labelOnTop="0" name="Slo2"/>
<field labelOnTop="0" name="Subbasin"/>
<field labelOnTop="0" name="Wid2"/>
</labelOnTop>
<widgets/>
      <previewExpression>COALESCE("ID", '&lt;NULL&gt;')</previewExpression>
<mapTip>ID</mapTip>
</maplayer>
<maplayer geometry="Line" maxScale="0" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="1" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="1" simplifyLocal="1" wkbType="MultiLineString" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328870.8826469536870718</xmin>
<ymin>1287794.26022111857309937</ymin>
<xmax>335980.8826469536870718</xmax>
<ymax>1291904.26022111857309937</ymax>
</extent>
<id>Channels__{dem_name}channel__a7e3608c_b71d_44f6_8194_67e56bb7c543</id>
<datasource>./Watershed/Shapes/{dem_name}channel.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Channels ({dem_name}channel)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="singleSymbol">
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="line" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleLine">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="27,179,255,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.26"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="ring_filter" v="0"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
<rotation/>
<sizescale/>
</renderer-v2>
<customproperties>
<property value="0" key="embeddedWidgets/count"/>
<property key="variableNames"/>
<property key="variableValues"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<SingleCategoryDiagramRenderer attributeLegend="1" diagramType="Histogram">
<DiagramCategory scaleBasedVisibility="0" lineSizeType="MM" sizeScale="3x:0,0,0,0,0,0" rotationOffset="270" minimumSize="0" diagramOrientation="Up" barWidth="5" maxScaleDenominator="1e+08" opacity="1" labelPlacementMethod="XHeight" backgroundAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" backgroundColor="#ffffff" sizeType="MM" penColor="#000000" height="15" enabled="0" penWidth="0" penAlpha="255" minScaleDenominator="inf" scaleDependency="Area" width="15">
<fontProperties style="" description="Ubuntu,8,-1,5,1,0,0,0,0,0"/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings dist="0" linePlacementFlags="10" zIndex="0" showAll="1" placement="2" obstacle="0" priority="0">
<properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</properties>
</DiagramLayerSettings>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="LINKNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DSLINKNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="USLINKNO1">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="USLINKNO2">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DSNODEID">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Order">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Length">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Magnitude">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DS_Cont_Ar">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Drop">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Slope">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Straight_L">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="US_Cont_Ar">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="WSNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DOUT_END">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DOUT_START">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DOUT_MID">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="BasinNo">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="LINKNO" index="0" name=""/>
<alias field="DSLINKNO" index="1" name=""/>
<alias field="USLINKNO1" index="2" name=""/>
<alias field="USLINKNO2" index="3" name=""/>
<alias field="DSNODEID" index="4" name=""/>
<alias field="Order" index="5" name=""/>
<alias field="Length" index="6" name=""/>
<alias field="Magnitude" index="7" name=""/>
<alias field="DS_Cont_Ar" index="8" name=""/>
<alias field="Drop" index="9" name=""/>
<alias field="Slope" index="10" name=""/>
<alias field="Straight_L" index="11" name=""/>
<alias field="US_Cont_Ar" index="12" name=""/>
<alias field="WSNO" index="13" name=""/>
<alias field="DOUT_END" index="14" name=""/>
<alias field="DOUT_START" index="15" name=""/>
<alias field="DOUT_MID" index="16" name=""/>
<alias field="BasinNo" index="17" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="LINKNO" applyOnUpdate="0" expression=""/>
<default field="DSLINKNO" applyOnUpdate="0" expression=""/>
<default field="USLINKNO1" applyOnUpdate="0" expression=""/>
<default field="USLINKNO2" applyOnUpdate="0" expression=""/>
<default field="DSNODEID" applyOnUpdate="0" expression=""/>
<default field="Order" applyOnUpdate="0" expression=""/>
<default field="Length" applyOnUpdate="0" expression=""/>
<default field="Magnitude" applyOnUpdate="0" expression=""/>
<default field="DS_Cont_Ar" applyOnUpdate="0" expression=""/>
<default field="Drop" applyOnUpdate="0" expression=""/>
<default field="Slope" applyOnUpdate="0" expression=""/>
<default field="Straight_L" applyOnUpdate="0" expression=""/>
<default field="US_Cont_Ar" applyOnUpdate="0" expression=""/>
<default field="WSNO" applyOnUpdate="0" expression=""/>
<default field="DOUT_END" applyOnUpdate="0" expression=""/>
<default field="DOUT_START" applyOnUpdate="0" expression=""/>
<default field="DOUT_MID" applyOnUpdate="0" expression=""/>
<default field="BasinNo" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="LINKNO" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DSLINKNO" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="USLINKNO1" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="USLINKNO2" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DSNODEID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Order" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Length" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Magnitude" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DS_Cont_Ar" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Drop" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Slope" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Straight_L" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="US_Cont_Ar" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="WSNO" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DOUT_END" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DOUT_START" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DOUT_MID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="BasinNo" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="LINKNO" exp="" desc=""/>
<constraint field="DSLINKNO" exp="" desc=""/>
<constraint field="USLINKNO1" exp="" desc=""/>
<constraint field="USLINKNO2" exp="" desc=""/>
<constraint field="DSNODEID" exp="" desc=""/>
<constraint field="Order" exp="" desc=""/>
<constraint field="Length" exp="" desc=""/>
<constraint field="Magnitude" exp="" desc=""/>
<constraint field="DS_Cont_Ar" exp="" desc=""/>
<constraint field="Drop" exp="" desc=""/>
<constraint field="Slope" exp="" desc=""/>
<constraint field="Straight_L" exp="" desc=""/>
<constraint field="US_Cont_Ar" exp="" desc=""/>
<constraint field="WSNO" exp="" desc=""/>
<constraint field="DOUT_END" exp="" desc=""/>
<constraint field="DOUT_START" exp="" desc=""/>
<constraint field="DOUT_MID" exp="" desc=""/>
<constraint field="BasinNo" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions/>
<attributetableconfig sortExpression="" sortOrder="1" actionWidgetStyle="dropDown">
<columns>
<column width="-1" hidden="0" type="field" name="LINKNO"/>
<column width="-1" hidden="0" type="field" name="DSLINKNO"/>
<column width="-1" hidden="0" type="field" name="USLINKNO1"/>
<column width="-1" hidden="0" type="field" name="USLINKNO2"/>
<column width="-1" hidden="0" type="field" name="DSNODEID"/>
<column width="-1" hidden="0" type="field" name="Order"/>
<column width="-1" hidden="0" type="field" name="Length"/>
<column width="-1" hidden="0" type="field" name="Magnitude"/>
<column width="-1" hidden="0" type="field" name="DS_Cont_Ar"/>
<column width="-1" hidden="0" type="field" name="Drop"/>
<column width="-1" hidden="0" type="field" name="Slope"/>
<column width="-1" hidden="0" type="field" name="Straight_L"/>
<column width="-1" hidden="0" type="field" name="US_Cont_Ar"/>
<column width="-1" hidden="0" type="field" name="WSNO"/>
<column width="-1" hidden="0" type="field" name="DOUT_END"/>
<column width="-1" hidden="0" type="field" name="DOUT_START"/>
<column width="-1" hidden="0" type="field" name="DOUT_MID"/>
<column width="-1" hidden="0" type="field" name="BasinNo"/>
<column width="-1" hidden="1" type="actions"/>
</columns>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1"></editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[# -*- coding: utf-8 -*-
"""
QGIS forms can have a Python function that is called when the form is
opened.
Use this function to add extra logic to your forms.
Enter the name of the function in the "Python Init function"
field.
An example follows:
"""
from qgis.PyQt.QtWidgets import QWidget
def my_form_open(dialog, layer, feature):
    # Read the feature's geometry and look up a widget on the form by its
    # object name; "MyLineEdit" is just a placeholder from the stock example.
    geom = feature.geometry()
    control = dialog.findChild(QWidget, "MyLineEdit")
]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable/>
<labelOnTop/>
<widgets/>
<previewExpression>"DSNODEID"</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer hasScaleBasedVisibilityFlag="0" styleCategories="AllStyleCategories" refreshOnNotifyEnabled="0" autoRefreshTime="0" minScale="1e+08" refreshOnNotifyMessage="" type="raster" autoRefreshEnabled="0" maxScale="0">
<extent>
<xmin>326065.8826469536870718</xmin>
<ymin>1286069.26022111857309937</ymin>
<xmax>338065.8826469536870718</xmax>
<ymax>1293509.26022111857309937</ymax>
</extent>
<id>DEM__{dem_name}__f751ab49_fdac_4766_be7f_300fbfe6adf2</id>
<datasource>./Watershed/Rasters/DEM/{dem_file_name}</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>DEM ({dem_name})</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type></type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider>gdal</provider>
<noData>
<noDataList useSrcNoData="1" bandNo="1"/>
</noData>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<customproperties>
<property value="Value" key="identify/format"/>
</customproperties>
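    <!-- Singleband pseudocolor DEM rendering: {dem_min}, {mid_thirds} and
         {dem_max} are elevation breakpoints, and the {lower_third} and
         {upper_third} labels are presumably derived from the raster
         statistics when the project is created. -->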
<pipe>
<rasterrenderer opacity="1" alphaBand="-1" band="1" classificationMin="nan" type="singlebandpseudocolor" classificationMax="nan">
<minMaxOrigin>
<limits>None</limits>
<extent>WholeRaster</extent>
<statAccuracy>Estimated</statAccuracy>
<cumulativeCutLower>0.02</cumulativeCutLower>
<cumulativeCutUpper>0.98</cumulativeCutUpper>
<stdDevFactor>2</stdDevFactor>
</minMaxOrigin>
<rastershader>
<colorrampshader clip="0" colorRampType="INTERPOLATED" classificationMode="1">
<item value="{dem_min}" color="#0a640a" label="{dem_min} - {lower_third}" alpha="255"/>
<item value="{mid_thirds}" color="#997d19" label="{lower_third} - {upper_third}" alpha="255"/>
<item value="{dem_max}" color="#ffffff" label="{upper_third} - {dem_max}" alpha="255"/>
</colorrampshader>
</rastershader>
</rasterrenderer>
<brightnesscontrast contrast="0" brightness="0"/>
<huesaturation colorizeStrength="100" colorizeOn="0" grayscaleMode="0" colorizeGreen="128" saturation="0" colorizeBlue="128" colorizeRed="255"/>
<rasterresampler maxOversampling="2"/>
</pipe>
<blendMode>0</blendMode>
</maplayer>
<maplayer geometry="Point" maxScale="-4.65661e-10" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="0" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="0" simplifyLocal="1" wkbType="Point" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328872.81934636097867042</xmin>
<ymin>1290232.52846676157787442</ymin>
<xmax>328872.81934636097867042</xmax>
<ymax>1290232.52846676157787442</ymax>
</extent>
<id>Drawn_inlets_outlets__{outlet_name}__c41cb90c_f1d6_4ffe_8a64_99bcb575d961</id>
<datasource>./Watershed/Shapes/{outlet_name}.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Drawn inlets/outlets ({outlet_name})</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="RuleRenderer">
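      <!-- Rule-based rendering for drawn inlets/outlets: combinations of the
           INLET, RES and PTSOURCE attributes select the Outlet, Inlet,
           Reservoir, Pond and Point source markers (symbols 0-4 below). -->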
      <rules key="{53a471a4-aa86-43ed-9d97-be98a958b7e1}">
        <rule key="{57a0b081-f9fc-46d9-a9f0-302b4d29c4d2}" label="Outlet" description="Outlet" symbol="0" filter=" &quot;INLET&quot; = 0 AND &quot;RES&quot; = 0"/>
        <rule key="{2b092b12-ae87-4524-bf91-ee93b3bfee30}" label="Inlet" description="Inlet" symbol="1" filter=" &quot;INLET&quot; = 1 AND &quot;PTSOURCE&quot; = 0"/>
        <rule key="{cc102ca4-a33c-498d-a2fc-8e598f588d20}" label="Reservoir" symbol="2" filter=" &quot;INLET&quot; = 0 AND &quot;RES&quot; = 1"/>
        <rule key="{0ede00e4-44a0-41de-b5a4-41741e7a90ad}" label="Pond" description="Pond" symbol="3" filter="&quot;INLET&quot; = 0 AND &quot;RES&quot; = 2"/>
        <rule key="{bb3546f0-1b2c-49be-a16f-9bb5728352fd}" label="Point source" description="Point source" symbol="4" filter=" &quot;INLET&quot; = 1 AND &quot;PTSOURCE&quot; = 1"/>
</rules>
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="0,85,255,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="filled_arrowhead"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="1">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="180"/>
<prop k="color" v="255,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="filled_arrowhead"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="2">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="0,85,255,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="3">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="30,55,244,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="35,35,35,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="diameter"/>
<prop k="size" v="2.6"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="4">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="255,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="2"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
</renderer-v2>
<customproperties>
<property value=""ID"" key="dualview/previewExpressions"/>
<property value="0" key="embeddedWidgets/count"/>
<property key="variableNames"/>
<property key="variableValues"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<SingleCategoryDiagramRenderer attributeLegend="1" diagramType="Histogram">
<DiagramCategory scaleBasedVisibility="0" lineSizeType="MM" sizeScale="3x:0,0,0,0,0,0" rotationOffset="270" minimumSize="0" diagramOrientation="Up" barWidth="5" maxScaleDenominator="1e+08" opacity="1" labelPlacementMethod="XHeight" backgroundAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" backgroundColor="#ffffff" sizeType="MM" penColor="#000000" height="15" enabled="0" penWidth="0" penAlpha="255" minScaleDenominator="-4.65661e-10" scaleDependency="Area" width="15">
<fontProperties style="" description="Ubuntu,8,-1,5,1,0,0,0,0,0"/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings dist="0" linePlacementFlags="18" zIndex="0" showAll="1" placement="0" obstacle="0" priority="0">
<properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</properties>
</DiagramLayerSettings>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="ID">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="INLET">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="RES">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="PTSOURCE">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="ID" index="0" name=""/>
<alias field="INLET" index="1" name=""/>
<alias field="RES" index="2" name=""/>
<alias field="PTSOURCE" index="3" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="ID" applyOnUpdate="0" expression=""/>
<default field="INLET" applyOnUpdate="0" expression=""/>
<default field="RES" applyOnUpdate="0" expression=""/>
<default field="PTSOURCE" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="ID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="INLET" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="RES" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="PTSOURCE" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="ID" exp="" desc=""/>
<constraint field="INLET" exp="" desc=""/>
<constraint field="RES" exp="" desc=""/>
<constraint field="PTSOURCE" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions>
        <defaultAction value="{00000000-0000-0000-0000-000000000000}" key="Canvas"/>
</attributeactions>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns>
<column width="-1" hidden="0" type="field" name="PTSOURCE"/>
<column width="-1" hidden="0" type="field" name="RES"/>
<column width="-1" hidden="0" type="field" name="INLET"/>
<column width="-1" hidden="0" type="field" name="ID"/>
<column width="-1" hidden="1" type="actions"/>
</columns>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">QSWATPlus_Projects/SanJuan/test1</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[# -*- coding: utf-8 -*-
"""
QGIS forms can have a Python function that is called when the form is
opened.
Use this function to add extra logic to your forms.
Enter the name of the function in the "Python Init function"
field.
An example follows:
"""
from qgis.PyQt.QtWidgets import QWidget
def my_form_open(dialog, layer, feature):
    # Read the feature's geometry and look up a widget on the form by its
    # object name; "MyLineEdit" is just a placeholder from the stock example.
    geom = feature.geometry()
    control = dialog.findChild(QWidget, "MyLineEdit")
]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable>
<field editable="1" name="ID"/>
<field editable="1" name="INLET"/>
<field editable="1" name="PTSOURCE"/>
<field editable="1" name="RES"/>
</editable>
<labelOnTop>
<field labelOnTop="0" name="ID"/>
<field labelOnTop="0" name="INLET"/>
<field labelOnTop="0" name="PTSOURCE"/>
<field labelOnTop="0" name="RES"/>
</labelOnTop>
<widgets/>
<previewExpression>ID</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer geometry="Polygon" maxScale="-4.65661e-10" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="1" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="1" simplifyLocal="1" wkbType="MultiPolygon" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328825.8826469536870718</xmin>
<ymin>1287329.26022111857309937</ymin>
<xmax>336355.8826469536870718</xmax>
<ymax>1292189.26022111857309937</ymax>
</extent>
<id>Full_HRUs__hrus1__4e2ba365_e7bd_4f8e_9d6c_79056945afb5</id>
<datasource>./Watershed/Shapes/hrus1.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Full HRUs (hrus1)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="singleSymbol">
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="fill" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleFill">
<prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="color" v="220,255,212,255"/>
<prop k="joinstyle" v="bevel"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.06"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="style" v="solid"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
<rotation/>
<sizescale/>
</renderer-v2>
<customproperties/>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="Subbasin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Channel">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Landscape">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Landuse">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Soil">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="SlopeBand">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Area">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="%Subbasin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="%Landscape">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="HRUS">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="LINKNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="Subbasin" index="0" name=""/>
<alias field="Channel" index="1" name=""/>
<alias field="Landscape" index="2" name=""/>
<alias field="Landuse" index="3" name=""/>
<alias field="Soil" index="4" name=""/>
<alias field="SlopeBand" index="5" name=""/>
<alias field="Area" index="6" name=""/>
<alias field="%Subbasin" index="7" name=""/>
<alias field="%Landscape" index="8" name=""/>
<alias field="HRUS" index="9" name=""/>
<alias field="LINKNO" index="10" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="Subbasin" applyOnUpdate="0" expression=""/>
<default field="Channel" applyOnUpdate="0" expression=""/>
<default field="Landscape" applyOnUpdate="0" expression=""/>
<default field="Landuse" applyOnUpdate="0" expression=""/>
<default field="Soil" applyOnUpdate="0" expression=""/>
<default field="SlopeBand" applyOnUpdate="0" expression=""/>
<default field="Area" applyOnUpdate="0" expression=""/>
<default field="%Subbasin" applyOnUpdate="0" expression=""/>
<default field="%Landscape" applyOnUpdate="0" expression=""/>
<default field="HRUS" applyOnUpdate="0" expression=""/>
<default field="LINKNO" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Channel" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Landscape" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Landuse" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Soil" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="SlopeBand" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Area" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="%Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="%Landscape" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="HRUS" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="LINKNO" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="Subbasin" exp="" desc=""/>
<constraint field="Channel" exp="" desc=""/>
<constraint field="Landscape" exp="" desc=""/>
<constraint field="Landuse" exp="" desc=""/>
<constraint field="Soil" exp="" desc=""/>
<constraint field="SlopeBand" exp="" desc=""/>
<constraint field="Area" exp="" desc=""/>
<constraint field="%Subbasin" exp="" desc=""/>
<constraint field="%Landscape" exp="" desc=""/>
<constraint field="HRUS" exp="" desc=""/>
<constraint field="LINKNO" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions/>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns/>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">../../../Documents</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable/>
<labelOnTop/>
<widgets/>
      <previewExpression>"Subbasin"</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer geometry="Polygon" maxScale="0" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="1" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="1" simplifyLocal="1" wkbType="MultiPolygon" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328825.8826469536870718</xmin>
<ymin>1287329.26022111857309937</ymin>
<xmax>336355.8826469536870718</xmax>
<ymax>1292189.26022111857309937</ymax>
</extent>
<id>Full_LSUs__lsus1__8f4e9cfb_3ca6_4a70_83b9_fe977379bcf4</id>
<datasource>./Watershed/Shapes/lsus1.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Full LSUs (lsus1)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="singleSymbol">
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="fill" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleFill">
<prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="color" v="0,0,255,255"/>
<prop k="joinstyle" v="bevel"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="248,157,178,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.3"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="style" v="no"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
<rotation/>
<sizescale/>
</renderer-v2>
<customproperties>
<property value="0" key="embeddedWidgets/count"/>
<property key="variableNames"/>
<property key="variableValues"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<SingleCategoryDiagramRenderer attributeLegend="1" diagramType="Histogram">
<DiagramCategory scaleBasedVisibility="0" lineSizeType="MM" sizeScale="3x:0,0,0,0,0,0" rotationOffset="270" minimumSize="0" diagramOrientation="Up" barWidth="5" maxScaleDenominator="1e+08" opacity="1" labelPlacementMethod="XHeight" backgroundAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" backgroundColor="#ffffff" sizeType="MM" penColor="#000000" height="15" enabled="0" penWidth="0" penAlpha="255" minScaleDenominator="inf" scaleDependency="Area" width="15">
<fontProperties style="" description="Ubuntu,8,-1,5,1,0,0,0,0,0"/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings dist="0" linePlacementFlags="10" zIndex="0" showAll="1" placement="0" obstacle="0" priority="0">
<properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</properties>
</DiagramLayerSettings>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="Area">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Channel">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="LSUID">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Subbasin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Landscape">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="%Subbasin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="Area" index="0" name=""/>
<alias field="Channel" index="1" name=""/>
<alias field="LSUID" index="2" name=""/>
<alias field="Subbasin" index="3" name=""/>
<alias field="Landscape" index="4" name=""/>
<alias field="%Subbasin" index="5" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="Area" applyOnUpdate="0" expression=""/>
<default field="Channel" applyOnUpdate="0" expression=""/>
<default field="LSUID" applyOnUpdate="0" expression=""/>
<default field="Subbasin" applyOnUpdate="0" expression=""/>
<default field="Landscape" applyOnUpdate="0" expression=""/>
<default field="%Subbasin" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="Area" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Channel" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="LSUID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Landscape" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="%Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="Area" exp="" desc=""/>
<constraint field="Channel" exp="" desc=""/>
<constraint field="LSUID" exp="" desc=""/>
<constraint field="Subbasin" exp="" desc=""/>
<constraint field="Landscape" exp="" desc=""/>
<constraint field="%Subbasin" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions/>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns>
        <column width="-1" hidden="0" type="field" name="Area"/>
<column width="-1" hidden="0" type="field" name="Channel"/>
<column width="-1" hidden="0" type="field" name="LSUID"/>
<column width="-1" hidden="0" type="field" name="Subbasin"/>
<column width="-1" hidden="0" type="field" name="Landscape"/>
        <column width="-1" hidden="0" type="field" name="%Subbasin"/>
<column width="-1" hidden="1" type="actions"/>
</columns>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1"></editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[# -*- coding: utf-8 -*-
"""
QGIS forms can have a Python function that is called when the form is
opened.
Use this function to add extra logic to your forms.
Enter the name of the function in the "Python Init function"
field.
An example follows:
"""
from qgis.PyQt.QtWidgets import QWidget
def my_form_open(dialog, layer, feature):
    geom = feature.geometry()
    control = dialog.findChild(QWidget, "MyLineEdit")
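    # Hypothetical continuation, not part of the stock QGIS template:
    # "MyLineEdit" is only a placeholder widget name, so findChild may
    # return None; guard before touching the control.
    if control is not None and geom is not None:
        control.setToolTip('Feature area: %.1f m2' % geom.area())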
]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable/>
<labelOnTop/>
<widgets/>
<previewExpression>"LSUID"</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer hasScaleBasedVisibilityFlag="0" styleCategories="AllStyleCategories" refreshOnNotifyEnabled="0" autoRefreshTime="0" minScale="1e+08" refreshOnNotifyMessage="" type="raster" autoRefreshEnabled="0" maxScale="0">
<extent>
<xmin>326065.8826469536870718</xmin>
<ymin>1286069.26022111857309937</ymin>
<xmax>338065.8826469536870718</xmax>
<ymax>1293509.26022111857309937</ymax>
</extent>
<id>Hillshade__{dem_name}hillshade__a6f33483_65e8_4cde_a966_948ff13f0c2a</id>
<datasource>./Watershed/Rasters/DEM/{dem_name}hillshade.tif</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Hillshade ({dem_name}hillshade)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type></type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider>gdal</provider>
<noData>
<noDataList useSrcNoData="1" bandNo="1"/>
</noData>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<customproperties>
<property value="Value" key="identify/format"/>
</customproperties>
<pipe>
<rasterrenderer opacity="0.4" alphaBand="-1" gradient="BlackToWhite" grayBand="1" type="singlebandgray">
<rasterTransparency/>
<minMaxOrigin>
<limits>MinMax</limits>
<extent>WholeRaster</extent>
<statAccuracy>Estimated</statAccuracy>
<cumulativeCutLower>0.02</cumulativeCutLower>
<cumulativeCutUpper>0.98</cumulativeCutUpper>
<stdDevFactor>2</stdDevFactor>
</minMaxOrigin>
<contrastEnhancement>
<minValue>1</minValue>
<maxValue>255</maxValue>
<algorithm>StretchToMinimumMaximum</algorithm>
</contrastEnhancement>
</rasterrenderer>
<brightnesscontrast contrast="0" brightness="0"/>
<huesaturation colorizeStrength="100" colorizeOn="0" grayscaleMode="0" colorizeGreen="128" saturation="0" colorizeBlue="128" colorizeRed="255"/>
<rasterresampler maxOversampling="2"/>
</pipe>
<blendMode>0</blendMode>
</maplayer>
<maplayer geometry="Point" maxScale="-4.65661e-10" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="0" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="0" simplifyLocal="1" wkbType="Point" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328653</xmin>
<ymin>1290104</ymin>
<xmax>328653</xmax>
<ymax>1290104</ymax>
</extent>
<id>Inlets_outlets__{outlet_name}__0c49465a_2a2b_4ecb_ae4f_fbb60c4c1bcb</id>
<datasource>./Watershed/Shapes/{outlet_name}.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Inlets/outlets ({outlet_name})</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="RuleRenderer">
      <rules key="{53a471a4-aa86-43ed-9d97-be98a958b7e1}">
        <rule key="{57a0b081-f9fc-46d9-a9f0-302b4d29c4d2}" label="Outlet" description="Outlet" symbol="0" filter=" &quot;INLET&quot; = 0 AND &quot;RES&quot; = 0"/>
        <rule key="{2b092b12-ae87-4524-bf91-ee93b3bfee30}" label="Inlet" description="Inlet" symbol="1" filter=" &quot;INLET&quot; = 1 AND &quot;PTSOURCE&quot; = 0"/>
        <rule key="{cc102ca4-a33c-498d-a2fc-8e598f588d20}" label="Reservoir" symbol="2" filter=" &quot;INLET&quot; = 0 AND &quot;RES&quot; = 1"/>
        <rule key="{0ede00e4-44a0-41de-b5a4-41741e7a90ad}" label="Pond" description="Pond" symbol="3" filter="&quot;INLET&quot; = 0 AND &quot;RES&quot; = 2"/>
        <rule key="{bb3546f0-1b2c-49be-a16f-9bb5728352fd}" label="Point source" description="Point source" symbol="4" filter=" &quot;INLET&quot; = 1 AND &quot;PTSOURCE&quot; = 1"/>
</rules>
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="0,85,255,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="filled_arrowhead"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="1">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="180"/>
<prop k="color" v="255,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="filled_arrowhead"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="2">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="0,85,255,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="3">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="30,55,244,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="35,35,35,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="diameter"/>
<prop k="size" v="2.6"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="4">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="255,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="2"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
</renderer-v2>
<customproperties>
        <property value="&quot;ID&quot;" key="dualview/previewExpressions"/>
<property value="0" key="embeddedWidgets/count"/>
<property key="variableNames"/>
<property key="variableValues"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<SingleCategoryDiagramRenderer attributeLegend="1" diagramType="Histogram">
<DiagramCategory scaleBasedVisibility="0" lineSizeType="MM" sizeScale="3x:0,0,0,0,0,0" rotationOffset="270" minimumSize="0" diagramOrientation="Up" barWidth="5" maxScaleDenominator="1e+08" opacity="1" labelPlacementMethod="XHeight" backgroundAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" backgroundColor="#ffffff" sizeType="MM" penColor="#000000" height="15" enabled="0" penWidth="0" penAlpha="255" minScaleDenominator="-4.65661e-10" scaleDependency="Area" width="15">
<fontProperties style="" description="Ubuntu,8,-1,5,1,0,0,0,0,0"/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings dist="0" linePlacementFlags="18" zIndex="0" showAll="1" placement="0" obstacle="0" priority="0">
<properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</properties>
</DiagramLayerSettings>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="ID">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="INLET">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="RES">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="PTSOURCE">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="ID" index="0" name=""/>
<alias field="INLET" index="1" name=""/>
<alias field="RES" index="2" name=""/>
<alias field="PTSOURCE" index="3" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="ID" applyOnUpdate="0" expression=""/>
<default field="INLET" applyOnUpdate="0" expression=""/>
<default field="RES" applyOnUpdate="0" expression=""/>
<default field="PTSOURCE" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="ID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="INLET" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="RES" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="PTSOURCE" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="ID" exp="" desc=""/>
<constraint field="INLET" exp="" desc=""/>
<constraint field="RES" exp="" desc=""/>
<constraint field="PTSOURCE" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions>
        <defaultAction value="{00000000-0000-0000-0000-000000000000}" key="Canvas"/>
</attributeactions>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns>
<column width="-1" hidden="0" type="field" name="PTSOURCE"/>
<column width="-1" hidden="0" type="field" name="RES"/>
<column width="-1" hidden="0" type="field" name="INLET"/>
<column width="-1" hidden="0" type="field" name="ID"/>
<column width="-1" hidden="1" type="actions"/>
</columns>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">QSWATPlus_Projects/SanJuan/test1</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[# -*- coding: utf-8 -*-
"""
QGIS forms can have a Python function that is called when the form is
opened.
Use this function to add extra logic to your forms.
Enter the name of the function in the "Python Init function"
field.
An example follows:
"""
from qgis.PyQt.QtWidgets import QWidget
def my_form_open(dialog, layer, feature):
    geom = feature.geometry()
    control = dialog.findChild(QWidget, "MyLineEdit")
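    # Hypothetical continuation (not in the stock template): surface the
    # PTSOURCE flag of this inlets/outlets feature on the placeholder widget.
    if control is not None:
        control.setToolTip('Point source flag: %s' % feature['PTSOURCE'])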
]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable>
<field editable="1" name="ID"/>
<field editable="1" name="INLET"/>
<field editable="1" name="PTSOURCE"/>
<field editable="1" name="RES"/>
</editable>
<labelOnTop>
<field labelOnTop="0" name="ID"/>
<field labelOnTop="0" name="INLET"/>
<field labelOnTop="0" name="PTSOURCE"/>
<field labelOnTop="0" name="RES"/>
</labelOnTop>
<widgets/>
<previewExpression>ID</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer hasScaleBasedVisibilityFlag="0" styleCategories="AllStyleCategories" refreshOnNotifyEnabled="0" autoRefreshTime="0" minScale="1e+08" refreshOnNotifyMessage="" type="raster" autoRefreshEnabled="0" maxScale="0">
<extent>
<xmin>{extent_xmin}</xmin>
<ymin>{extent_ymin}</ymin>
<xmax>{extent_xmax}</xmax>
<ymax>{extent_ymax}</ymax>
</extent>
<id>Landuses__{landuse_name}__f7ec5ca9_3dce_4d3e_8def_9e31ecc6c163</id>
<datasource>./Watershed/Rasters/Landuse/{landuse_file_name}</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Landuses ({landuse_name})</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type></type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider>gdal</provider>
<noData>
<noDataList useSrcNoData="1" bandNo="1"/>
</noData>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<customproperties>
<property value="Value" key="identify/format"/>
</customproperties>
<pipe>
<rasterrenderer opacity="1" alphaBand="-1" band="1" classificationMin="nan" type="singlebandpseudocolor" classificationMax="nan">
<minMaxOrigin>
<limits>None</limits>
<extent>WholeRaster</extent>
<statAccuracy>Estimated</statAccuracy>
<cumulativeCutLower>0.02</cumulativeCutLower>
<cumulativeCutUpper>0.98</cumulativeCutUpper>
<stdDevFactor>2</stdDevFactor>
</minMaxOrigin>
<rastershader>
<colorrampshader clip="0" colorRampType="DISCRETE" classificationMode="1">
<item value="0" color="#a2a3c2" label="AGRL" alpha="255"/>
<item value="2" color="#c9dce1" label="AGRL" alpha="255"/>
<item value="3" color="#4b0105" label="PAST" alpha="255"/>
<item value="4" color="#60f22b" label="FRST" alpha="255"/>
</colorrampshader>
</rastershader>
</rasterrenderer>
<brightnesscontrast contrast="0" brightness="0"/>
<huesaturation colorizeStrength="100" colorizeOn="0" grayscaleMode="0" colorizeGreen="128" saturation="0" colorizeBlue="128" colorizeRed="255"/>
<rasterresampler maxOversampling="2"/>
</pipe>
<blendMode>0</blendMode>
</maplayer>
<maplayer geometry="Point" maxScale="-4.65661e-10" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="0" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="0" simplifyLocal="1" wkbType="Point" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328900.8826469536870718</xmin>
<ymin>1287794.26022111857309937</ymin>
<xmax>335980.8826469536870718</xmax>
<ymax>1291904.26022111857309937</ymax>
</extent>
<id>Pt_sources_and_reservoirs__reservoirs2__ada5d781_850f_43ac_825b_b807e28299e4</id>
<datasource>./Watershed/Shapes/reservoirs2.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Pt sources and reservoirs (reservoirs2)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="RuleRenderer">
      <rules key="{53a471a4-aa86-43ed-9d97-be98a958b7e1}">
        <rule key="{cc102ca4-a33c-498d-a2fc-8e598f588d20}" label="Reservoir" symbol="0" filter=" &quot;INLET&quot; = 0 AND &quot;RES&quot; = 1"/>
        <rule key="{210e8415-28da-4360-9dcb-9c89a8497b13}" label="Point source" description="Point source" symbol="1" filter=" &quot;INLET&quot; = 1 AND &quot;PTSOURCE&quot; = 1"/>
</rules>
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="0,85,255,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="1">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="255,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="2"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
</renderer-v2>
<customproperties>
<property value="0" key="embeddedWidgets/count"/>
<property key="variableNames"/>
<property key="variableValues"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<SingleCategoryDiagramRenderer attributeLegend="1" diagramType="Histogram">
<DiagramCategory scaleBasedVisibility="0" lineSizeType="MM" sizeScale="3x:0,0,0,0,0,0" rotationOffset="270" minimumSize="0" diagramOrientation="Up" barWidth="5" maxScaleDenominator="1e+08" opacity="1" labelPlacementMethod="XHeight" backgroundAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" backgroundColor="#ffffff" sizeType="MM" penColor="#000000" height="15" enabled="0" penWidth="0" penAlpha="255" minScaleDenominator="-4.65661e-10" scaleDependency="Area" width="15">
<fontProperties style="" description="Ubuntu,8,-1,5,1,0,0,0,0,0"/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings dist="0" linePlacementFlags="18" zIndex="0" showAll="1" placement="0" obstacle="0" priority="0">
<properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</properties>
</DiagramLayerSettings>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="ID">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="INLET">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="RES">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="PTSOURCE">
<editWidget type="TextEdit">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="ID" index="0" name=""/>
<alias field="INLET" index="1" name=""/>
<alias field="RES" index="2" name=""/>
<alias field="PTSOURCE" index="3" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="ID" applyOnUpdate="0" expression=""/>
<default field="INLET" applyOnUpdate="0" expression=""/>
<default field="RES" applyOnUpdate="0" expression=""/>
<default field="PTSOURCE" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="ID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="INLET" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="RES" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="PTSOURCE" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="ID" exp="" desc=""/>
<constraint field="INLET" exp="" desc=""/>
<constraint field="RES" exp="" desc=""/>
<constraint field="PTSOURCE" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions>
        <defaultAction value="{00000000-0000-0000-0000-000000000000}" key="Canvas"/>
</attributeactions>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns>
<column width="-1" hidden="0" type="field" name="ID"/>
<column width="-1" hidden="0" type="field" name="INLET"/>
<column width="-1" hidden="0" type="field" name="RES"/>
<column width="-1" hidden="0" type="field" name="PTSOURCE"/>
<column width="-1" hidden="1" type="actions"/>
</columns>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">../../../Documents</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[# -*- coding: utf-8 -*-
"""
QGIS forms can have a Python function that is called when the form is
opened.
Use this function to add extra logic to your forms.
Enter the name of the function in the "Python Init function"
field.
An example follows:
"""
from qgis.PyQt.QtWidgets import QWidget
def my_form_open(dialog, layer, feature):
    geom = feature.geometry()
    control = dialog.findChild(QWidget, "MyLineEdit")
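    # Hypothetical continuation (not in the stock template): disable the
    # placeholder widget for reservoir features (RES = 1 in this layer).
    if control is not None:
        control.setEnabled(feature['RES'] != 1)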
]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable>
<field editable="1" name="ID"/>
<field editable="1" name="INLET"/>
<field editable="1" name="PTSOURCE"/>
<field editable="1" name="RES"/>
</editable>
<labelOnTop>
<field labelOnTop="0" name="ID"/>
<field labelOnTop="0" name="INLET"/>
<field labelOnTop="0" name="PTSOURCE"/>
<field labelOnTop="0" name="RES"/>
</labelOnTop>
<widgets/>
<previewExpression>ID</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer hasScaleBasedVisibilityFlag="0" styleCategories="AllStyleCategories" refreshOnNotifyEnabled="0" autoRefreshTime="0" minScale="1e+08" refreshOnNotifyMessage="" type="raster" autoRefreshEnabled="0" maxScale="0">
<extent>
<xmin>326065.8826469536870718</xmin>
<ymin>1286069.26022111857309937</ymin>
<xmax>338065.8826469536870718</xmax>
<ymax>1293509.26022111857309937</ymax>
</extent>
<id>Slope_bands__{dem_name}slp_bands__daa1ee9a_d352_4de4_a12e_21aa0143f677</id>
<datasource>./Watershed/Rasters/DEM/{dem_name}slp_bands.tif</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Slope bands ({dem_name}slp_bands)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type></type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider>gdal</provider>
<noData>
<noDataList useSrcNoData="0" bandNo="1"/>
</noData>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<customproperties>
<property value="Value" key="identify/format"/>
</customproperties>
<pipe>
<rasterrenderer opacity="1" alphaBand="-1" band="1" classificationMin="nan" type="singlebandpseudocolor" classificationMax="nan">
<minMaxOrigin>
<limits>None</limits>
<extent>WholeRaster</extent>
<statAccuracy>Estimated</statAccuracy>
<cumulativeCutLower>0.02</cumulativeCutLower>
<cumulativeCutUpper>0.98</cumulativeCutUpper>
<stdDevFactor>2</stdDevFactor>
</minMaxOrigin>
<rastershader>
<colorrampshader clip="0" colorRampType="DISCRETE" classificationMode="1">
<item value="0" color="#fafafa" label="0-5.0" alpha="255"/>
<item value="1" color="#050505" label="5.0-9999" alpha="255"/>
</colorrampshader>
</rastershader>
</rasterrenderer>
<brightnesscontrast contrast="0" brightness="0"/>
<huesaturation colorizeStrength="100" colorizeOn="0" grayscaleMode="0" colorizeGreen="128" saturation="0" colorizeBlue="128" colorizeRed="255"/>
<rasterresampler maxOversampling="2"/>
</pipe>
<blendMode>0</blendMode>
</maplayer>
<maplayer geometry="Point" maxScale="-4.65661e-10" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="0" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="0" simplifyLocal="1" wkbType="Point" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328867</xmin>
<ymin>1290227</ymin>
<xmax>328867</xmax>
<ymax>1290227</ymax>
</extent>
<id>Snapped_inlets_outlets__{outlet_name}_snap__2a54eb19_3da0_420d_b964_e4cd8efd371f</id>
<datasource>./Watershed/Shapes/{outlet_name}_snap.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Snapped inlets/outlets ({outlet_name}_snap)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="RuleRenderer">
      <rules key="{53a471a4-aa86-43ed-9d97-be98a958b7e1}">
        <rule key="{57a0b081-f9fc-46d9-a9f0-302b4d29c4d2}" label="Outlet" description="Outlet" symbol="0" filter=" &quot;INLET&quot; = 0 AND &quot;RES&quot; = 0"/>
        <rule key="{2b092b12-ae87-4524-bf91-ee93b3bfee30}" label="Inlet" description="Inlet" symbol="1" filter=" &quot;INLET&quot; = 1 AND &quot;PTSOURCE&quot; = 0"/>
        <rule key="{cc102ca4-a33c-498d-a2fc-8e598f588d20}" label="Reservoir" symbol="2" filter=" &quot;INLET&quot; = 0 AND &quot;RES&quot; = 1"/>
        <rule key="{0ede00e4-44a0-41de-b5a4-41741e7a90ad}" label="Pond" description="Pond" symbol="3" filter="&quot;INLET&quot; = 0 AND &quot;RES&quot; = 2"/>
        <rule key="{bb3546f0-1b2c-49be-a16f-9bb5728352fd}" label="Point source" description="Point source" symbol="4" filter=" &quot;INLET&quot; = 1 AND &quot;PTSOURCE&quot; = 1"/>
</rules>
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="0,85,255,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="filled_arrowhead"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="1">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="180"/>
<prop k="color" v="255,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="filled_arrowhead"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="2">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="0,85,255,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="4"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="3">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="30,55,244,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="35,35,35,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="diameter"/>
<prop k="size" v="2.6"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="marker" name="4">
<layer pass="0" locked="0" enabled="1" class="SimpleMarker">
<prop k="angle" v="0"/>
<prop k="color" v="255,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="joinstyle" v="bevel"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="2"/>
<prop k="size_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
</renderer-v2>
<customproperties>
        <property value="&quot;ID&quot;" key="dualview/previewExpressions"/>
<property value="0" key="embeddedWidgets/count"/>
<property key="variableNames"/>
<property key="variableValues"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<SingleCategoryDiagramRenderer attributeLegend="1" diagramType="Histogram">
<DiagramCategory scaleBasedVisibility="0" lineSizeType="MM" sizeScale="3x:0,0,0,0,0,0" rotationOffset="270" minimumSize="0" diagramOrientation="Up" barWidth="5" maxScaleDenominator="1e+08" opacity="1" labelPlacementMethod="XHeight" backgroundAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" backgroundColor="#ffffff" sizeType="MM" penColor="#000000" height="15" enabled="0" penWidth="0" penAlpha="255" minScaleDenominator="-4.65661e-10" scaleDependency="Area" width="15">
<fontProperties style="" description="Ubuntu,8,-1,5,1,0,0,0,0,0"/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings dist="0" linePlacementFlags="18" zIndex="0" showAll="1" placement="0" obstacle="0" priority="0">
<properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</properties>
</DiagramLayerSettings>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="ID">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="INLET">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="RES">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
<field name="PTSOURCE">
<editWidget type="TextEdit">
<config>
<Option type="Map">
<Option value="0" type="QString" name="IsMultiline"/>
<Option value="0" type="QString" name="UseHtml"/>
</Option>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="ID" index="0" name=""/>
<alias field="INLET" index="1" name=""/>
<alias field="RES" index="2" name=""/>
<alias field="PTSOURCE" index="3" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="ID" applyOnUpdate="0" expression=""/>
<default field="INLET" applyOnUpdate="0" expression=""/>
<default field="RES" applyOnUpdate="0" expression=""/>
<default field="PTSOURCE" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="ID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="INLET" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="RES" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="PTSOURCE" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="ID" exp="" desc=""/>
<constraint field="INLET" exp="" desc=""/>
<constraint field="RES" exp="" desc=""/>
<constraint field="PTSOURCE" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions>
        <defaultAction value="{00000000-0000-0000-0000-000000000000}" key="Canvas"/>
</attributeactions>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns>
<column width="-1" hidden="0" type="field" name="PTSOURCE"/>
<column width="-1" hidden="0" type="field" name="RES"/>
<column width="-1" hidden="0" type="field" name="INLET"/>
<column width="-1" hidden="0" type="field" name="ID"/>
<column width="-1" hidden="1" type="actions"/>
</columns>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">../../../../../QSWATPlus_Projects/SanJuan/test1</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[# -*- coding: utf-8 -*-
"""
QGIS forms can have a Python function that is called when the form is
opened.
Use this function to add extra logic to your forms.
Enter the name of the function in the "Python Init function"
field.
An example follows:
"""
from qgis.PyQt.QtWidgets import QWidget
def my_form_open(dialog, layer, feature):
geom = feature.geometry()
control = dialog.findChild(QWidget, "MyLineEdit")
]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable>
<field editable="1" name="ID"/>
<field editable="1" name="INLET"/>
<field editable="1" name="PTSOURCE"/>
<field editable="1" name="RES"/>
</editable>
<labelOnTop>
<field labelOnTop="0" name="ID"/>
<field labelOnTop="0" name="INLET"/>
<field labelOnTop="0" name="PTSOURCE"/>
<field labelOnTop="0" name="RES"/>
</labelOnTop>
<widgets/>
<previewExpression>ID</previewExpression>
<mapTip></mapTip>
</maplayer>
<maplayer hasScaleBasedVisibilityFlag="0" styleCategories="AllStyleCategories" refreshOnNotifyEnabled="0" autoRefreshTime="0" minScale="1e+08" refreshOnNotifyMessage="" type="raster" autoRefreshEnabled="0" maxScale="0">
<extent>
<xmin>326002.06302211945876479</xmin>
<ymin>1286032.46381390024907887</ymin>
<xmax>338138.04992584581486881</xmax>
<ymax>1293574.11281835869885981</ymax>
</extent>
<id>Soils__{soil_name}_tif__2cd25288_d1b5_4e76_83af_39034c9f7ffd</id>
<datasource>./Watershed/Rasters/Soil/{soil_file_name}</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Soils ({soil_name})</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type></type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider>gdal</provider>
<noData>
<noDataList useSrcNoData="1" bandNo="1"/>
</noData>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<customproperties>
<property value="Value" key="identify/format"/>
</customproperties>
<pipe>
<rasterrenderer opacity="1" alphaBand="-1" band="1" classificationMin="nan" type="singlebandpseudocolor" classificationMax="nan">
<minMaxOrigin>
<limits>None</limits>
<extent>WholeRaster</extent>
<statAccuracy>Estimated</statAccuracy>
<cumulativeCutLower>0.02</cumulativeCutLower>
<cumulativeCutUpper>0.98</cumulativeCutUpper>
<stdDevFactor>2</stdDevFactor>
</minMaxOrigin>
<rastershader>
<colorrampshader clip="0" colorRampType="DISCRETE" classificationMode="1">
<item value="0" color="#089d97" label="LVx" alpha="255"/>
<item value="178" color="#abd7ed" label="VRe" alpha="255"/>
</colorrampshader>
</rastershader>
</rasterrenderer>
<brightnesscontrast contrast="0" brightness="0"/>
<huesaturation colorizeStrength="100" colorizeOn="0" grayscaleMode="0" colorizeGreen="128" saturation="0" colorizeBlue="128" colorizeRed="255"/>
<rasterresampler maxOversampling="2"/>
</pipe>
<blendMode>0</blendMode>
</maplayer>
<maplayer geometry="Line" maxScale="0" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="0" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="1" simplifyLocal="1" wkbType="MultiLineString" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328870.8826469536870718</xmin>
<ymin>1288124.26022111857309937</ymin>
<xmax>335260.8826469536870718</xmax>
<ymax>1291454.26022111857309937</ymax>
</extent>
<id>Streams__{dem_name}stream__6a837462_9d7d_48f0_a6c1_1710f553d03b</id>
<datasource>./Watershed/Shapes/{dem_name}stream.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Streams ({dem_name}stream)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="singleSymbol">
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="line" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleLine">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="0,85,255,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.26"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="ring_filter" v="0"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
<rotation/>
<sizescale/>
</renderer-v2>
<customproperties/>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="LINKNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DSLINKNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="USLINKNO1">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="USLINKNO2">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DSNODEID">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Order">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Length">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Magnitude">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DS_Cont_Ar">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Drop">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Slope">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Straight_L">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="US_Cont_Ar">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="WSNO">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DOUT_END">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DOUT_START">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="DOUT_MID">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="LINKNO" index="0" name=""/>
<alias field="DSLINKNO" index="1" name=""/>
<alias field="USLINKNO1" index="2" name=""/>
<alias field="USLINKNO2" index="3" name=""/>
<alias field="DSNODEID" index="4" name=""/>
<alias field="Order" index="5" name=""/>
<alias field="Length" index="6" name=""/>
<alias field="Magnitude" index="7" name=""/>
<alias field="DS_Cont_Ar" index="8" name=""/>
<alias field="Drop" index="9" name=""/>
<alias field="Slope" index="10" name=""/>
<alias field="Straight_L" index="11" name=""/>
<alias field="US_Cont_Ar" index="12" name=""/>
<alias field="WSNO" index="13" name=""/>
<alias field="DOUT_END" index="14" name=""/>
<alias field="DOUT_START" index="15" name=""/>
<alias field="DOUT_MID" index="16" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="LINKNO" applyOnUpdate="0" expression=""/>
<default field="DSLINKNO" applyOnUpdate="0" expression=""/>
<default field="USLINKNO1" applyOnUpdate="0" expression=""/>
<default field="USLINKNO2" applyOnUpdate="0" expression=""/>
<default field="DSNODEID" applyOnUpdate="0" expression=""/>
<default field="Order" applyOnUpdate="0" expression=""/>
<default field="Length" applyOnUpdate="0" expression=""/>
<default field="Magnitude" applyOnUpdate="0" expression=""/>
<default field="DS_Cont_Ar" applyOnUpdate="0" expression=""/>
<default field="Drop" applyOnUpdate="0" expression=""/>
<default field="Slope" applyOnUpdate="0" expression=""/>
<default field="Straight_L" applyOnUpdate="0" expression=""/>
<default field="US_Cont_Ar" applyOnUpdate="0" expression=""/>
<default field="WSNO" applyOnUpdate="0" expression=""/>
<default field="DOUT_END" applyOnUpdate="0" expression=""/>
<default field="DOUT_START" applyOnUpdate="0" expression=""/>
<default field="DOUT_MID" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="LINKNO" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DSLINKNO" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="USLINKNO1" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="USLINKNO2" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DSNODEID" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Order" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Length" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Magnitude" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DS_Cont_Ar" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Drop" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Slope" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Straight_L" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="US_Cont_Ar" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="WSNO" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DOUT_END" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DOUT_START" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="DOUT_MID" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="LINKNO" exp="" desc=""/>
<constraint field="DSLINKNO" exp="" desc=""/>
<constraint field="USLINKNO1" exp="" desc=""/>
<constraint field="USLINKNO2" exp="" desc=""/>
<constraint field="DSNODEID" exp="" desc=""/>
<constraint field="Order" exp="" desc=""/>
<constraint field="Length" exp="" desc=""/>
<constraint field="Magnitude" exp="" desc=""/>
<constraint field="DS_Cont_Ar" exp="" desc=""/>
<constraint field="Drop" exp="" desc=""/>
<constraint field="Slope" exp="" desc=""/>
<constraint field="Straight_L" exp="" desc=""/>
<constraint field="US_Cont_Ar" exp="" desc=""/>
<constraint field="WSNO" exp="" desc=""/>
<constraint field="DOUT_END" exp="" desc=""/>
<constraint field="DOUT_START" exp="" desc=""/>
<constraint field="DOUT_MID" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions/>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns/>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">../../../Documents</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable/>
<labelOnTop/>
<widgets/>
<previewExpression></previewExpression>
<mapTip>ID</mapTip>
</maplayer>
<maplayer geometry="Polygon" maxScale="-4.65661e-10" refreshOnNotifyEnabled="0" type="vector" styleCategories="AllStyleCategories" simplifyDrawingHints="1" simplifyMaxScale="1" autoRefreshEnabled="0" labelsEnabled="1" simplifyLocal="1" wkbType="MultiPolygon" minScale="1e+08" simplifyDrawingTol="1" refreshOnNotifyMessage="" readOnly="0" hasScaleBasedVisibilityFlag="0" simplifyAlgorithm="0" autoRefreshTime="0">
<extent>
<xmin>328825.8826469536870718</xmin>
<ymin>1287329.26022111857309937</ymin>
<xmax>336355.8826469536870718</xmax>
<ymax>1292189.26022111857309937</ymax>
</extent>
<id>Subbasins__subs1__3017a81e_0174_439c_b815_cf54de0e0667</id>
<datasource>./Watershed/Shapes/subs1.shp</datasource>
<keywordList>
<value></value>
</keywordList>
<layername>Subbasins (subs1)</layername>
<srs>
<spatialrefsys>
<wkt>{prjcrs}</wkt>
<proj4>{proj4}</proj4>
<srsid>{srsid}</srsid>
<srid>{srid}</srid>
<authid>EPSG:{srid}</authid>
<description>{srs_description}</description>
<projectionacronym>{projectionacronym}</projectionacronym>
<ellipsoidacronym>{ellipsoidacronym}</ellipsoidacronym>
<geographicflag>{geographicflag}</geographicflag>
</spatialrefsys>
</srs>
<resourceMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type>dataset</type>
<title></title>
<abstract></abstract>
<links/>
<fees></fees>
<encoding></encoding>
<crs>
<spatialrefsys>
<wkt></wkt>
<proj4></proj4>
<srsid>0</srsid>
<srid>0</srid>
<authid></authid>
<description></description>
<projectionacronym></projectionacronym>
<ellipsoidacronym></ellipsoidacronym>
<geographicflag>false</geographicflag>
</spatialrefsys>
</crs>
<extent/>
</resourceMetadata>
<provider encoding="UTF-8">ogr</provider>
<vectorjoins/>
<layerDependencies/>
<dataDependencies/>
<legend type="default-vector"/>
<expressionfields/>
<map-layer-style-manager current="default">
<map-layer-style name="default"/>
</map-layer-style-manager>
<auxiliaryLayer/>
<flags>
<Identifiable>1</Identifiable>
<Removable>1</Removable>
<Searchable>1</Searchable>
</flags>
<renderer-v2 enableorderby="0" forceraster="0" symbollevels="0" type="RuleRenderer">
    <rules key="{192bdd02-ed6d-4f65-842d-a83746e86517}">
      <rule key="{4b960711-df1e-4d23-bc17-d5ffaae25809}" label="SWAT subbasin" description="Included in SWAT model" symbol="0" filter="&quot;Subbasin&quot; IS NULL OR &quot;Subbasin&quot; > 0 "/>
      <rule key="{96ec9113-62fe-4bba-a4e0-917d85ec2586}" label="Upstream from inlet" description="Excluded from SWAT model" symbol="1" filter="&quot;Subbasin&quot; = 0"/>
</rules>
<symbols>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="fill" name="0">
<layer pass="0" locked="0" enabled="1" class="SimpleFill">
<prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="color" v="32,37,161,255"/>
<prop k="joinstyle" v="bevel"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="255,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.26"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="style" v="no"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="fill" name="1">
<layer pass="0" locked="0" enabled="1" class="SimpleFill">
<prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="color" v="255,255,255,255"/>
<prop k="joinstyle" v="bevel"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.26"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="style" v="no"/>
<data_defined_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</data_defined_properties>
</layer>
</symbol>
</symbols>
</renderer-v2>
<labeling type="simple">
<settings calloutType="simple">
<text-style fontUnderline="0" blendMode="0" isExpression="1" fontStrikeout="0" textOrientation="horizontal" fontLetterSpacing="0" fieldName="CASE WHEN "Subbasin" = 0 THEN '' ELSE "Subbasin" END" useSubstitutions="0" fontItalic="0" fontSize="8.25" fontWeight="50" fontCapitals="0" previewBkgrdColor="255,255,255,255" textOpacity="1" fontKerning="1" fontSizeMapUnitScale="3x:0,0,0,0,0,0" fontFamily="MS Shell Dlg 2" fontWordSpacing="0" textColor="0,0,0,255" namedStyle="Normal" multilineHeight="1" fontSizeUnit="Point">
<text-buffer bufferBlendMode="0" bufferSizeMapUnitScale="3x:0,0,0,0,0,0" bufferSizeUnits="MM" bufferColor="255,255,255,255" bufferNoFill="0" bufferJoinStyle="64" bufferSize="1" bufferOpacity="1" bufferDraw="0"/>
<background shapeType="0" shapeRotation="0" shapeSizeY="0" shapeRadiiY="0" shapeFillColor="255,255,255,255" shapeOpacity="1" shapeOffsetX="0" shapeRotationType="0" shapeBorderColor="128,128,128,255" shapeBlendMode="0" shapeRadiiUnit="MM" shapeOffsetMapUnitScale="3x:0,0,0,0,0,0" shapeJoinStyle="64" shapeSizeX="0" shapeBorderWidthUnit="MM" shapeRadiiX="0" shapeDraw="0" shapeOffsetY="0" shapeBorderWidthMapUnitScale="3x:0,0,0,0,0,0" shapeSizeUnit="MM" shapeBorderWidth="0" shapeRadiiMapUnitScale="3x:0,0,0,0,0,0" shapeSVGFile="" shapeSizeMapUnitScale="3x:0,0,0,0,0,0" shapeSizeType="0" shapeOffsetUnit="MM"/>
<shadow shadowOffsetGlobal="1" shadowDraw="0" shadowOffsetAngle="135" shadowOffsetUnit="MM" shadowColor="0,0,0,255" shadowRadiusUnit="MM" shadowBlendMode="6" shadowUnder="0" shadowRadiusAlphaOnly="0" shadowOpacity="0.7" shadowRadiusMapUnitScale="3x:0,0,0,0,0,0" shadowOffsetDist="1" shadowRadius="1.5" shadowOffsetMapUnitScale="3x:0,0,0,0,0,0" shadowScale="100"/>
<dd_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</dd_properties>
<substitutions/>
</text-style>
<text-format plussign="0" useMaxLineLengthForAutoWrap="1" placeDirectionSymbol="0" decimals="3" leftDirectionSymbol="<" multilineAlign="0" addDirectionSymbol="0" rightDirectionSymbol=">" reverseDirectionSymbol="0" autoWrapLength="0" wrapChar="" formatNumbers="0"/>
<placement yOffset="0" repeatDistanceMapUnitScale="3x:0,0,0,0,0,0" distUnits="MM" overrunDistance="0" repeatDistance="0" geometryGenerator="" xOffset="0" maxCurvedCharAngleOut="-20" overrunDistanceMapUnitScale="3x:0,0,0,0,0,0" layerType="UnknownGeometry" placementFlags="0" placement="1" priority="5" dist="0" fitInPolygonOnly="0" geometryGeneratorType="PointGeometry" preserveRotation="1" distMapUnitScale="3x:0,0,0,0,0,0" centroidInside="1" quadOffset="4" offsetUnits="MapUnit" repeatDistanceUnits="MM" rotationAngle="0" labelOffsetMapUnitScale="3x:0,0,0,0,0,0" overrunDistanceUnit="MM" predefinedPositionOrder="TR,TL,BR,BL,R,L,TSR,BSR" centroidWhole="1" maxCurvedCharAngleIn="20" offsetType="0" geometryGeneratorEnabled="0"/>
<rendering fontMinPixelSize="3" limitNumLabels="0" scaleVisibility="0" displayAll="0" drawLabels="1" scaleMin="1" fontMaxPixelSize="10000" mergeLines="0" obstacleType="0" zIndex="0" scaleMax="10000000" labelPerPart="0" obstacleFactor="1" fontLimitPixelSize="0" maxNumLabels="2000" obstacle="1" upsidedownLabels="0" minFeatureSize="0"/>
<dd_properties>
<Option type="Map">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
</dd_properties>
<callout type="simple">
<Option type="Map">
<Option value="pole_of_inaccessibility" type="QString" name="anchorPoint"/>
<Option type="Map" name="ddProperties">
<Option value="" type="QString" name="name"/>
<Option name="properties"/>
<Option value="collection" type="QString" name="type"/>
</Option>
<Option value="false" type="bool" name="drawToAllParts"/>
<Option value="0" type="QString" name="enabled"/>
<Option value="<symbol clip_to_extent="1" force_rhr="0" alpha="1" type="line" name="symbol"><layer pass="0" locked="0" enabled="1" class="SimpleLine"><prop k="capstyle" v="square"/><prop k="customdash" v="5;2"/><prop k="customdash_map_unit_scale" v="3x:0,0,0,0,0,0"/><prop k="customdash_unit" v="MM"/><prop k="draw_inside_polygon" v="0"/><prop k="joinstyle" v="bevel"/><prop k="line_color" v="60,60,60,255"/><prop k="line_style" v="solid"/><prop k="line_width" v="0.3"/><prop k="line_width_unit" v="MM"/><prop k="offset" v="0"/><prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/><prop k="offset_unit" v="MM"/><prop k="ring_filter" v="0"/><prop k="use_custom_dash" v="0"/><prop k="width_map_unit_scale" v="3x:0,0,0,0,0,0"/><data_defined_properties><Option type="Map"><Option value="" type="QString" name="name"/><Option name="properties"/><Option value="collection" type="QString" name="type"/></Option></data_defined_properties></layer></symbol>" type="QString" name="lineSymbol"/>
<Option value="0" type="double" name="minLength"/>
<Option value="3x:0,0,0,0,0,0" type="QString" name="minLengthMapUnitScale"/>
<Option value="MM" type="QString" name="minLengthUnit"/>
<Option value="0" type="double" name="offsetFromAnchor"/>
<Option value="3x:0,0,0,0,0,0" type="QString" name="offsetFromAnchorMapUnitScale"/>
<Option value="MM" type="QString" name="offsetFromAnchorUnit"/>
<Option value="0" type="double" name="offsetFromLabel"/>
<Option value="3x:0,0,0,0,0,0" type="QString" name="offsetFromLabelMapUnitScale"/>
<Option value="MM" type="QString" name="offsetFromLabelUnit"/>
</Option>
</callout>
</settings>
</labeling>
<customproperties/>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerOpacity>1</layerOpacity>
<geometryOptions geometryPrecision="0" removeDuplicateNodes="0">
<activeChecks/>
<checkConfiguration/>
</geometryOptions>
<fieldConfiguration>
<field name="PolygonId">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Subbasin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Area">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Slo1">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Len1">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Sll">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Lat">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Lon">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="Elev">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="ElevMin">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
<field name="ElevMax">
<editWidget type="">
<config>
<Option/>
</config>
</editWidget>
</field>
</fieldConfiguration>
<aliases>
<alias field="PolygonId" index="0" name=""/>
<alias field="Subbasin" index="1" name=""/>
<alias field="Area" index="2" name=""/>
<alias field="Slo1" index="3" name=""/>
<alias field="Len1" index="4" name=""/>
<alias field="Sll" index="5" name=""/>
<alias field="Lat" index="6" name=""/>
<alias field="Lon" index="7" name=""/>
<alias field="Elev" index="8" name=""/>
<alias field="ElevMin" index="9" name=""/>
<alias field="ElevMax" index="10" name=""/>
</aliases>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<defaults>
<default field="PolygonId" applyOnUpdate="0" expression=""/>
<default field="Subbasin" applyOnUpdate="0" expression=""/>
<default field="Area" applyOnUpdate="0" expression=""/>
<default field="Slo1" applyOnUpdate="0" expression=""/>
<default field="Len1" applyOnUpdate="0" expression=""/>
<default field="Sll" applyOnUpdate="0" expression=""/>
<default field="Lat" applyOnUpdate="0" expression=""/>
<default field="Lon" applyOnUpdate="0" expression=""/>
<default field="Elev" applyOnUpdate="0" expression=""/>
<default field="ElevMin" applyOnUpdate="0" expression=""/>
<default field="ElevMax" applyOnUpdate="0" expression=""/>
</defaults>
<constraints>
<constraint exp_strength="0" field="PolygonId" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Subbasin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Area" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Slo1" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Len1" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Sll" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Lat" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Lon" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="Elev" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="ElevMin" unique_strength="0" notnull_strength="0" constraints="0"/>
<constraint exp_strength="0" field="ElevMax" unique_strength="0" notnull_strength="0" constraints="0"/>
</constraints>
<constraintExpressions>
<constraint field="PolygonId" exp="" desc=""/>
<constraint field="Subbasin" exp="" desc=""/>
<constraint field="Area" exp="" desc=""/>
<constraint field="Slo1" exp="" desc=""/>
<constraint field="Len1" exp="" desc=""/>
<constraint field="Sll" exp="" desc=""/>
<constraint field="Lat" exp="" desc=""/>
<constraint field="Lon" exp="" desc=""/>
<constraint field="Elev" exp="" desc=""/>
<constraint field="ElevMin" exp="" desc=""/>
<constraint field="ElevMax" exp="" desc=""/>
</constraintExpressions>
<expressionfields/>
<attributeactions/>
<attributetableconfig sortExpression="" sortOrder="0" actionWidgetStyle="dropDown">
<columns/>
</attributetableconfig>
<conditionalstyles>
<rowstyles/>
<fieldstyles/>
</conditionalstyles>
<storedexpressions/>
<editform tolerant="1">../../../Documents</editform>
<editforminit/>
<editforminitcodesource>0</editforminitcodesource>
<editforminitfilepath></editforminitfilepath>
<editforminitcode><![CDATA[]]></editforminitcode>
<featformsuppress>0</featformsuppress>
<editorlayout>generatedlayout</editorlayout>
<editable/>
<labelOnTop/>
<widgets/>
<previewExpression>"PolygonId"</previewExpression>
<mapTip></mapTip>
</maplayer>
</projectlayers>
<layerorder>
<layer id="DEM__{dem_name}__f751ab49_fdac_4766_be7f_300fbfe6adf2"/>
<layer id="Hillshade__{dem_name}hillshade__a6f33483_65e8_4cde_a966_948ff13f0c2a"/>
<layer id="Inlets_outlets__{outlet_name}__0c49465a_2a2b_4ecb_ae4f_fbb60c4c1bcb"/>
<layer id="Drawn_inlets_outlets__{outlet_name}__c41cb90c_f1d6_4ffe_8a64_99bcb575d961"/>
<layer id="Landuses__{landuse_name}__f7ec5ca9_3dce_4d3e_8def_9e31ecc6c163"/>
<layer id="Soils__{soil_name}_tif__2cd25288_d1b5_4e76_83af_39034c9f7ffd"/>
<layer id="Snapped_inlets_outlets__{outlet_name}_snap__2a54eb19_3da0_420d_b964_e4cd8efd371f"/>
<layer id="Streams__{dem_name}stream__6a837462_9d7d_48f0_a6c1_1710f553d03b"/>
<layer id="Channels__{dem_name}channel__a7e3608c_b71d_44f6_8194_67e56bb7c543"/>
<layer id="Full_LSUs__lsus1__8f4e9cfb_3ca6_4a70_83b9_fe977379bcf4"/>
<layer id="Full_HRUs__hrus1__4e2ba365_e7bd_4f8e_9d6c_79056945afb5"/>
<layer id="Slope_bands__{dem_name}slp_bands__daa1ee9a_d352_4de4_a12e_21aa0143f677"/>
<layer id="Pt_sources_and_reservoirs__reservoirs2__ada5d781_850f_43ac_825b_b807e28299e4"/>
<layer id="Channel_reaches__rivs1__514d2d76_3dcd_4834_8bd4_42392284ab2f"/>
<layer id="Actual_HRUs__hrus2__7adc36e1_3c7f_40db_8b2c_bb4f79fa3338"/>
<layer id="Subbasins__subs1__3017a81e_0174_439c_b815_cf54de0e0667"/>
</layerorder>
<properties>
<Gui>
<CanvasColorBluePart type="int">255</CanvasColorBluePart>
<CanvasColorGreenPart type="int">255</CanvasColorGreenPart>
<CanvasColorRedPart type="int">255</CanvasColorRedPart>
<SelectionColorAlphaPart type="int">255</SelectionColorAlphaPart>
<SelectionColorBluePart type="int">0</SelectionColorBluePart>
<SelectionColorGreenPart type="int">255</SelectionColorGreenPart>
<SelectionColorRedPart type="int">255</SelectionColorRedPart>
</Gui>
<Legend>
<filterByMap type="bool">false</filterByMap>
</Legend>
<Measure>
<Ellipsoid type="QString">{ellipsoidacronym}</Ellipsoid>
</Measure>
<Measurement>
<AreaUnits type="QString">m2</AreaUnits>
<DistanceUnits type="QString">meters</DistanceUnits>
</Measurement>
<PAL>
<CandidatesLine type="int">50</CandidatesLine>
<CandidatesPoint type="int">16</CandidatesPoint>
<CandidatesPolygon type="int">30</CandidatesPolygon>
<DrawRectOnly type="bool">false</DrawRectOnly>
<DrawUnplaced type="bool">false</DrawUnplaced>
<SearchMethod type="int">0</SearchMethod>
<ShowingAllLabels type="bool">false</ShowingAllLabels>
<ShowingCandidates type="bool">false</ShowingCandidates>
<ShowingPartialsLabels type="bool">true</ShowingPartialsLabels>
<TextFormat type="int">0</TextFormat>
<UnplacedColor type="QString">255,0,0,255</UnplacedColor>
</PAL>
<Paths>
<Absolute type="bool">false</Absolute>
</Paths>
<PositionPrecision>
<Automatic type="bool">true</Automatic>
<DecimalPlaces type="int">2</DecimalPlaces>
</PositionPrecision>
<SpatialRefSys>
<ProjectionsEnabled type="int">1</ProjectionsEnabled>
</SpatialRefSys>
<{project_name}>
<delin>
<DEM type="QString">./Watershed/Rasters/DEM/{dem_file_name}</DEM>
<burn type="QString"></burn>
<channels type="QString">./Watershed/Shapes/{dem_name}channel.shp</channels>
<delinNet type="QString">./Watershed/Shapes/{dem_name}stream.shp</delinNet>
<drainageTable type="QString"></drainageTable>
<existingWshed type="int">0</existingWshed>
<extraOutlets type="QString"></extraOutlets>
<gridDrainage type="int">0</gridDrainage>
<gridSize type="int">0</gridSize>
<lakePointsAdded type="int">0</lakePointsAdded>
<lakes type="QString"></lakes>
<lakesDone type="int">0</lakesDone>
<net type="QString">./Watershed/Shapes/{dem_name}stream.shp</net>
<outlets type="QString">./Watershed/Shapes/{outlet_name}.shp</outlets>
<snapOutlets type="QString">./Watershed/Shapes/{outlet_name}_snap.shp</snapOutlets>
<snapThreshold type="int">{snap_threshold}</snapThreshold>
<streamDrainage type="int">1</streamDrainage>
<subbasins type="QString">./Watershed/Shapes/{dem_name}subbasins.shp</subbasins>
<thresholdCh type="int">{channel_threshold}</thresholdCh>
<thresholdSt type="int">{stream_threshold}</thresholdSt>
<useGridModel type="int">0</useGridModel>
<useOutlets type="int">1</useOutlets>
<verticalUnits type="QString">metres</verticalUnits>
<wshed type="QString">./Watershed/Shapes/{dem_name}wshed.shp</wshed>
</delin>
<hru>
<areaVal type="int">{area_val}</areaVal>
<elevBandsThreshold type="int">0</elevBandsThreshold>
<isArea type="int">{is_area}</isArea>
<isDominantHRU type="int">{is_dominant_hru}</isDominantHRU>
<isMultiple type="int">{is_multiple}</isMultiple>
<isTarget type="int">{is_target}</isTarget>
<landuseVal type="int">{hru_land_thres}</landuseVal>
<numElevBands type="int">0</numElevBands>
<slopeBands type="QString">[{slope_classes}]</slopeBands>
<slopeBandsFile type="QString">./Watershed/Rasters/DEM/{dem_name}slp_bands.tif</slopeBandsFile>
<slopeVal type="int">{hru_slope_thres}</slopeVal>
<soilVal type="int">{hru_soil_thres}</soilVal>
<targetVal type="int">{target_val}</targetVal>
<useArea type="int">{use_area}</useArea>
</hru>
<landuse>
<file type="QString">./Watershed/Rasters/Landuse/{landuse_file_name}</file>
<plant type="QString">plant</plant>
<table type="QString">{land_lookup}</table>
<urban type="QString">urban</urban>
<water type="int">1</water>
</landuse>
<lsu>
<channelMergeByPercent type="int">1</channelMergeByPercent>
<channelMergeVal type="int">0</channelMergeVal>
<floodplainFile type="QString"></floodplainFile>
<thresholdResNoFlood type="int">101</thresholdResNoFlood>
<useLandscapes type="int">0</useLandscapes>
<useLeftRight type="int">0</useLeftRight>
</lsu>
<soil>
<database type="QString">./{project_name}.sqlite</database>
<databaseTable type="QString">{usersoil}</databaseTable>
<file type="QString">./Watershed/Rasters/Soil/{soil_file_name}</file>
<table type="QString">{soil_lookup}</table>
<useSSURGO type="int">0</useSSURGO>
<useSTATSGO type="int">0</useSTATSGO>
</soil>
</{project_name}>
</properties>
<visibility-presets/>
<transformContext/>
<projectMetadata>
<identifier></identifier>
<parentidentifier></parentidentifier>
<language></language>
<type></type>
<title>{project_name}</title>
<abstract></abstract>
<links/>
<author>Celray James</author>
<creation>2020-03-04T15:58:23</creation>
</projectMetadata>
<Annotations/>
<Layouts/>
<Bookmarks/>
<ProjectViewSettings UseProjectScales="0">
<Scales/>
</ProjectViewSettings>
</qgis>
''' | 45.463496 | 1,567 | 0.585761 |
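# ---------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): the QGIS XML
# above is a project *template* in which tokens such as {dem_name}, {prjcrs}
# and {snap_threshold} are named placeholders. A minimal sketch of how such a
# template could be filled; the function name and substitution strategy are
# assumptions, since plain str.format() would trip over the literal braces
# that also appear in the XML (e.g. the layer-style UUID keys).

def fill_project_template(template, values):
    """Replace each {name} placeholder with its value, one key at a time."""
    for key, value in values.items():
        template = template.replace('{%s}' % key, str(value))
    return template

# Example with made-up values:
# xml = fill_project_template(template, {'dem_name': 'dem30m',
#                                        'snap_threshold': 300})
# ---------------------------------------------------------------------------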
793ef97a555a0b9d790255d66dad3d6bae2846a0 | 130 | py | Python | oscar/lib/python2.7/site-packages/txclib/__init__.py | bhav11esh/Oscar-Bookshelf | b48f088e2ed908b3603f2ecc63d602f81392eac4 | [
"BSD-3-Clause"
] | null | null | null | oscar/lib/python2.7/site-packages/txclib/__init__.py | bhav11esh/Oscar-Bookshelf | b48f088e2ed908b3603f2ecc63d602f81392eac4 | [
"BSD-3-Clause"
] | null | null | null | oscar/lib/python2.7/site-packages/txclib/__init__.py | bhav11esh/Oscar-Bookshelf | b48f088e2ed908b3603f2ecc63d602f81392eac4 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes
__version__ = '0.12.4'
| 26 | 81 | 0.684615 |
793efa5b83afd27a62ed9d94284dadbd36f2c0f9 | 5,572 | py | Python | recipes/avahi/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 562 | 2019-09-04T12:23:43.000Z | 2022-03-29T16:41:43.000Z | recipes/avahi/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 9,799 | 2019-09-04T12:02:11.000Z | 2022-03-31T23:55:45.000Z | recipes/avahi/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 1,126 | 2019-09-04T11:57:46.000Z | 2022-03-31T16:43:38.000Z | from conans import ConanFile, tools, AutoToolsBuildEnvironment
from conans.errors import ConanInvalidConfiguration
import os
required_conan_version = ">=1.33.0"
class AvahiConan(ConanFile):
name = "avahi"
# --enable-compat-libdns_sd means that this recipe provides the mdnsresponder compile interface
provides = "mdnsresponder"
description = "Avahi - Service Discovery for Linux using mDNS/DNS-SD -- compatible with Bonjour"
topics = ("avahi", "Bonjour", "DNS-SD", "mDNS")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/lathiat/avahi"
license = "LGPL-2.1-only"
settings = "os", "arch", "compiler", "build_type"
generators = "pkg_config"
options = {
"shared": [True, False],
"fPIC": [True, False]
}
default_options = {
"shared": False,
"fPIC": True
}
_autotools = None
@property
def _source_subfolder(self):
return "source_subfolder"
def requirements(self):
self.requires("glib/2.68.3")
self.requires("expat/2.4.1")
self.requires("libdaemon/0.14")
self.requires("dbus/1.12.20")
self.requires("gdbm/1.19")
self.requires("libevent/2.1.12")
def validate(self):
if self.settings.os != "Linux" or tools.cross_building(self):
raise ConanInvalidConfiguration("Only Linux is supported for this package.")
def configure(self):
del self.settings.compiler.cppstd
del self.settings.compiler.libcxx
if self.options.shared:
del self.options.fPIC
def source(self):
tools.get(**self.conan_data["sources"][self.version],
destination=self._source_subfolder, strip_root=True)
@property
def _configure_args(self):
yes_no = lambda v: "yes" if v else "no"
return [
"--enable-shared={}".format(yes_no(self.options.shared)),
"--enable-static={}".format(yes_no(not self.options.shared)),
"--disable-gtk3",
"--disable-mono",
"--disable-python",
"--disable-qt5",
"--disable-monodoc",
"--enable-compat-libdns_sd",
"--with-systemdsystemunitdir={}/lib/systemd/system".format(self.package_folder),
]
def _configure_autotools(self):
if self._autotools:
return self._autotools
self._autotools = AutoToolsBuildEnvironment(self)
self._autotools.configure(configure_dir=self._source_subfolder, args=self._configure_args)
return self._autotools
def build(self):
autotools = self._configure_autotools()
autotools.make()
def package(self):
autotools = self._configure_autotools()
autotools.install()
self.copy("LICENSE", dst="licenses", src=self._source_subfolder)
tools.rmdir(os.path.join(self.package_folder, "etc"))
tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
tools.remove_files_by_mask(os.path.join(self.package_folder, "lib"), "*.la")
tools.rmdir(os.path.join(self.package_folder, "share"))
def package_info(self):
self.cpp_info.names["cmake_find_package"] = "Avahi"
self.cpp_info.names["cmake_find_package_multi"] = "Avahi"
for lib in ("client", "common", "core", "glib", "gobject", "libevent", "compat-libdns_sd"):
avahi_lib = "avahi-{}".format(lib)
self.cpp_info.components[lib].names["cmake_find_package"] = lib
self.cpp_info.components[lib].names["cmake_find_package_multi"] = lib
self.cpp_info.components[lib].names["pkg_config"] = avahi_lib
self.cpp_info.components[lib].libs = [avahi_lib]
self.cpp_info.components[lib].includedirs = [os.path.join("include", avahi_lib)]
self.cpp_info.components["compat-libdns_sd"].libs = ["dns_sd"]
self.cpp_info.components["client"].requires = ["common", "dbus::dbus"]
self.cpp_info.components["common"].system_libs = ["pthread"]
self.cpp_info.components["core"].requires = ["common"]
self.cpp_info.components["glib"].requires = ["common", "glib::glib"]
self.cpp_info.components["gobject"].requires = ["client", "glib"]
self.cpp_info.components["libevent"].requires = ["common", "libevent::libevent"]
self.cpp_info.components["compat-libdns_sd"].requires = ["client"]
for app in ("autoipd", "browse", "daemon", "dnsconfd", "publish", "resolve", "set-host-name"):
avahi_app = "avahi-{}".format(app)
self.cpp_info.components[app].names["cmake_find_package"] = app
self.cpp_info.components[app].names["cmake_find_package_multi"] = app
self.cpp_info.components[app].names["pkg_config"] = avahi_app
self.cpp_info.components["autoipd"].requires = ["libdaemon::libdaemon"]
self.cpp_info.components["browse"].requires = ["client", "gdbm::gdbm"]
self.cpp_info.components["daemon"].requires = ["core", "expat::expat", "libdaemon::libdaemon"]
self.cpp_info.components["dnsconfd"].requires = ["common", "libdaemon::libdaemon"]
self.cpp_info.components["publish"].requires = ["client"]
self.cpp_info.components["resolve"].requires = ["client"]
self.cpp_info.components["set-host-name"].requires = ["client"]
bin_path = os.path.join(self.package_folder, "bin")
self.output.info("Appending PATH environment variable: {}".format(bin_path))
self.env_info.PATH.append(bin_path)
| 42.861538 | 102 | 0.639268 |
793efb43f421359668a567223f4464e9e01aa6ef | 3,805 | py | Python | blog/views.py | kritebh/django-blog | 5b3d76e4b41735368169cbd7ef54f32c233160d4 | [
"Apache-2.0"
] | null | null | null | blog/views.py | kritebh/django-blog | 5b3d76e4b41735368169cbd7ef54f32c233160d4 | [
"Apache-2.0"
] | null | null | null | blog/views.py | kritebh/django-blog | 5b3d76e4b41735368169cbd7ef54f32c233160d4 | [
"Apache-2.0"
] | null | null | null | from .forms import AddCommentForm
from django.contrib.auth.models import User
from django.shortcuts import redirect, render,get_object_or_404,HttpResponseRedirect
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse_lazy,reverse
from django.contrib.auth.decorators import login_required
from django.views.generic import ListView,DetailView,CreateView,UpdateView,DeleteView
# from .forms import UpdatePostForm,AddPostForm
from .models import Comment, Post,Category
# Create your views here.
class HomeView(ListView):
model = Post
template_name='blog/homepage.html'
ordering = ['-id']
def get_context_data(self,*args, **kwargs):
cat_menu = Category.objects.all()
context = super(HomeView,self).get_context_data(*args,**kwargs)
context["cat_menu"]=cat_menu
return context
class PostDetailView(DetailView):
model = Post
template_name ='blog/post_detail.html'
def get_context_data(self,*args, **kwargs):
cat_menu = Category.objects.all()
context = super(PostDetailView,self).get_context_data(*args,**kwargs)
like_obj = get_object_or_404(Post,id=self.kwargs['pk'])
total_likes = like_obj.total_likes()
context["cat_menu"]=cat_menu
context["total_likes"] = total_likes
return context
class AddPostView(LoginRequiredMixin,CreateView):
login_url = '/u/login/'
redirect_field_name = 'redirect_to'
model = Post
# form_class = AddPostForm
fields = ('title','featured_image','category','body')
template_name="blog/add_post.html"
# fields = '__all__'
def form_valid(self, form):
form.instance.author = self.request.user
return super().form_valid(form)
class UpdatePostView(LoginRequiredMixin,UpdateView):
login_url = '/u/login/'
redirect_field_name = 'redirect_to'
model = Post
fields = ('title','featured_image','category','body')
template_name = 'blog/update_post.html'
class PostDeleteView(LoginRequiredMixin,DeleteView):
login_url = '/u/login/'
model = Post
template_name = 'blog/delete_post.html'
success_url = reverse_lazy('homepage')
def post_by_category(request,name):
# category = Category.objects.get(id=id)
get_cat_id = Category.objects.filter(name=name).values_list('pk',flat=True)
post = Post.objects.filter(category_id=int(get_cat_id[0])).order_by('-post_created_at')
cat_menu = Category.objects.all()
return render(request,'blog/category_post.html',{'post':post,'category':name,'cat_menu':cat_menu})
@login_required(redirect_field_name='redirect_to',login_url='login')
def like_post(request,pk):
post = get_object_or_404(Post,id=request.POST.get('post_id'))
post.likes.add(request.user)
return HttpResponseRedirect(reverse('detail',args=[str(pk)]))
def author_page(request,author):
get_author_id = User.objects.filter(username=author).values_list('pk',flat=True)
author_detail=User.objects.get(pk=int(get_author_id[0]))
post = Post.objects.filter(author_id=int(get_author_id[0])).order_by('-post_created_at')
return render(request,'blog/author_page.html',{'post':post,'author':author_detail})
class AddCommentView(CreateView):
model = Comment
form_class = AddCommentForm
# fields = '__all__'
# fields = ('title','featured_image','category','body')
template_name="blog/add_comment.html"
success_url=reverse_lazy('homepage')
def form_valid(self, form):
form.instance.post_id = self.kwargs['pk']
return super().form_valid(form)
def search(request):
query = request.GET['query']
posts = Post.objects.filter(title__icontains=query) | Post.objects.filter(body__icontains=query)
return render(request,'blog/search.html',{'post':posts,'query':query}) | 36.941748 | 102 | 0.72431 |
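# Editor's note (illustrative): the views above reverse() URL names such as
# 'homepage' and 'detail', so the app's urls.py presumably looks roughly like
# the sketch below. The paths and the 'like_post' name are assumptions.
#
# from django.urls import path
# from .views import (HomeView, PostDetailView, like_post,
#                     post_by_category, search)
#
# urlpatterns = [
#     path('', HomeView.as_view(), name='homepage'),
#     path('post/<int:pk>/', PostDetailView.as_view(), name='detail'),
#     path('post/<int:pk>/like/', like_post, name='like_post'),
#     path('category/<str:name>/', post_by_category, name='category'),
# ]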
793efbc34dea3526e7837e8f1a09afdef570c0c6 | 5,019 | py | Python | input_process.py | ChenYizhu97/BRITS | 79dda68d3d76b6a0e49caa1a3a6f2bb837b8c959 | [
"MIT"
] | null | null | null | input_process.py | ChenYizhu97/BRITS | 79dda68d3d76b6a0e49caa1a3a6f2bb837b8c959 | [
"MIT"
] | null | null | null | input_process.py | ChenYizhu97/BRITS | 79dda68d3d76b6a0e49caa1a3a6f2bb837b8c959 | [
"MIT"
] | null | null | null | # coding: utf-8
import os
import re
import numpy as np
import pandas as pd
import pandas.io.json as json
patient_ids = []
for filename in os.listdir('./raw'):
    # each patient record in PhysioNet is identified by a 6-digit ID
    match = re.search(r'\d{6}', filename)
if match:
id_ = match.group()
patient_ids.append(id_)
out = pd.read_csv('./raw/Outcomes-a.txt').set_index('RecordID')['In-hospital_death']
# we select 35 attributes which contain enough non-missing values
attributes = ['DiasABP', 'HR', 'Na', 'Lactate', 'NIDiasABP', 'PaO2', 'WBC', 'pH', 'Albumin', 'ALT', 'Glucose', 'SaO2',
'Temp', 'AST', 'Bilirubin', 'HCO3', 'BUN', 'RespRate', 'Mg', 'HCT', 'SysABP', 'FiO2', 'K', 'GCS',
'Cholesterol', 'NISysABP', 'TroponinT', 'MAP', 'TroponinI', 'PaCO2', 'Platelets', 'Urine', 'NIMAP',
'Creatinine', 'ALP']
# mean and std of 35 attributes
mean = np.array([59.540976152469405, 86.72320413227443, 139.06972964987443, 2.8797765291788986, 58.13833409690321,
147.4835678885565, 12.670222585415166, 7.490957887101613, 2.922874149659863, 394.8899400819931,
141.4867570064675, 96.66380228136883, 37.07362841054398, 505.5576196473552, 2.906465787821709,
23.118951553526724, 27.413004968675743, 19.64795551193981, 2.0277491155660416, 30.692432164676188,
119.60137167841977, 0.5404785381886381, 4.135790642787733, 11.407767149315339, 156.51746031746032,
119.15012244292181, 1.2004983498349853, 80.20321011673151, 7.127188940092161, 40.39875518672199,
191.05877024038804, 116.1171573535279, 77.08923183026529, 1.5052390166989214, 116.77122488658458])
std = np.array(
[13.01436781437145, 17.789923096504985, 5.185595006246348, 2.5287518090506755, 15.06074282896952, 85.96290370390257,
7.649058756791069, 8.384743923130074, 0.6515057685658769, 1201.033856726966, 67.62249645388543, 3.294112002091972,
1.5604879744921516, 1515.362517984297, 5.902070316876287, 4.707600932877377, 23.403743427107095, 5.50914416318306,
0.4220051299992514, 5.002058959758486, 23.730556355204214, 0.18634432509312762, 0.706337033602292,
3.967579823394297, 45.99491531484596, 21.97610723063014, 2.716532297586456, 16.232515568438338, 9.754483687298688,
9.062327978713556, 106.50939503021543, 170.65318497610315, 14.856134327604906, 1.6369529387005546,
133.96778334724377])
fs = open('./json/json', 'w')
def to_time_bin(x):
h, m = map(int, x.split(':'))
return h
def parse_data(x):
x = x.set_index('Parameter').to_dict()['Value']
values = []
for attr in attributes:
if attr in x:
values.append(x[attr])
else:
values.append(np.nan)
return values
def parse_delta(masks, dir_):
if dir_ == 'backward':
masks = masks[::-1]
deltas = []
for h in range(48):
if h == 0:
deltas.append(np.ones(35))
else:
deltas.append(np.ones(35) + (1 - masks[h]) * deltas[-1])
return np.array(deltas)
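# Editor's note (illustrative): worked example of the recurrence above for a
# single attribute over four hours. With masks m = [1, 0, 0, 1]:
#   deltas[0] = 1                          (first time bin)
#   deltas[1] = 1 + (1 - 0) * 1 = 2        (still missing, gap grows)
#   deltas[2] = 1 + (1 - 0) * 2 = 3
#   deltas[3] = 1 + (1 - 1) * 3 = 1        (observed again, gap resets)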
def parse_rec(values, masks, evals, eval_masks, dir_):
deltas = parse_delta(masks, dir_)
# only used in GRU-D
forwards = pd.DataFrame(values).fillna(method='ffill').fillna(0.0).values
rec = {}
rec['values'] = np.nan_to_num(values).tolist()
rec['masks'] = masks.astype('int32').tolist()
# imputation ground-truth
rec['evals'] = np.nan_to_num(evals).tolist()
rec['eval_masks'] = eval_masks.astype('int32').tolist()
rec['forwards'] = forwards.tolist()
rec['deltas'] = deltas.tolist()
return rec
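# Editor's note (illustrative): for the arrays passed into parse_rec,
# eval_masks = (~isnan(values)) ^ (~isnan(evals)) flags exactly the entries
# that exist in `evals` but were artificially removed from `values` -- the
# held-out imputation ground truth. E.g. evals = [5.0, nan] and
# values = [nan, nan] give masks = [0, 0] and eval_masks = [1, 0].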
def parse_id(id_):
data = pd.read_csv('./raw/{}.txt'.format(id_))
# accumulate the records within one hour
data['Time'] = data['Time'].apply(lambda x: to_time_bin(x))
evals = []
# merge all the metrics within one hour
for h in range(48):
evals.append(parse_data(data[data['Time'] == h]))
evals = (np.array(evals) - mean) / std
shp = evals.shape
evals = evals.reshape(-1)
# randomly eliminate 10% values as the imputation ground-truth
indices = np.where(~np.isnan(evals))[0].tolist()
indices = np.random.choice(indices, len(indices) // 10)
values = evals.copy()
values[indices] = np.nan
masks = ~np.isnan(values)
eval_masks = (~np.isnan(values)) ^ (~np.isnan(evals))
evals = evals.reshape(shp)
values = values.reshape(shp)
masks = masks.reshape(shp)
eval_masks = eval_masks.reshape(shp)
label = out.loc[int(id_)]
rec = {'label': label}
# prepare the model for both directions
rec['forward'] = parse_rec(values, masks, evals, eval_masks, dir_='forward')
rec['backward'] = parse_rec(values[::-1], masks[::-1], evals[::-1], eval_masks[::-1], dir_='backward')
rec = json.dumps(rec)
fs.write(rec + '\n')
for id_ in patient_ids:
print('Processing patient {}'.format(id_))
try:
parse_id(id_)
except Exception as e:
print(e)
continue
fs.close()
| 32.590909 | 120 | 0.650329 |
793efc5575f3a62840771ce0d53197978567ec7a | 7,292 | py | Python | hyperglass/models/api/query.py | blkmajik/hyperglass | c52a6f609843177671d38bcad59b8bd658f46b64 | [
"BSD-3-Clause-Clear"
] | 298 | 2019-06-17T13:51:46.000Z | 2021-06-23T18:09:51.000Z | hyperglass/models/api/query.py | blkmajik/hyperglass | c52a6f609843177671d38bcad59b8bd658f46b64 | [
"BSD-3-Clause-Clear"
] | 137 | 2019-06-18T12:59:37.000Z | 2021-06-19T05:50:58.000Z | hyperglass/models/api/query.py | blkmajik/hyperglass | c52a6f609843177671d38bcad59b8bd658f46b64 | [
"BSD-3-Clause-Clear"
] | 42 | 2019-06-18T07:25:23.000Z | 2021-06-18T17:40:20.000Z | """Input query validation model."""
# Standard Library
import json
import hashlib
import secrets
from datetime import datetime
# Third Party
from pydantic import BaseModel, StrictStr, constr, validator
# Project
from hyperglass.exceptions import InputInvalid
from hyperglass.configuration import params, devices
# Local
from .types import SupportedQuery
from .validators import (
validate_ip,
validate_aspath,
validate_community_input,
validate_community_select,
)
from ..config.vrf import Vrf
def get_vrf_object(vrf_name: str) -> Vrf:
"""Match VRF object from VRF name."""
for vrf_obj in devices.vrf_objects:
if vrf_name is not None:
if vrf_name == vrf_obj._id or vrf_name == vrf_obj.display_name:
return vrf_obj
elif vrf_name == "__hyperglass_default" and vrf_obj.default:
return vrf_obj
elif vrf_name is None:
if vrf_obj.default:
return vrf_obj
raise InputInvalid(params.messages.vrf_not_found, vrf_name=vrf_name)
class Query(BaseModel):
"""Validation model for input query parameters."""
query_location: StrictStr
query_type: SupportedQuery
query_vrf: StrictStr
query_target: constr(strip_whitespace=True, min_length=1)
class Config:
"""Pydantic model configuration."""
extra = "allow"
fields = {
"query_location": {
"title": params.web.text.query_location,
"description": "Router/Location Name",
"example": "router01",
},
"query_type": {
"title": params.web.text.query_type,
"description": "Type of Query to Execute",
"example": "bgp_route",
},
"query_vrf": {
"title": params.web.text.query_vrf,
"description": "Routing Table/VRF",
"example": "default",
},
"query_target": {
"title": params.web.text.query_target,
"description": "IP Address, Community, or AS Path",
"example": "1.1.1.0/24",
},
}
schema_extra = {
"x-code-samples": [{"lang": "Python", "source": "print('stuff')"}]
}
def __init__(self, **kwargs):
"""Initialize the query with a UTC timestamp at initialization time."""
super().__init__(**kwargs)
self.timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
def __repr__(self):
"""Represent only the query fields."""
return (
f"Query(query_location={str(self.query_location)}, "
f"query_type={str(self.query_type)}, query_vrf={str(self.query_vrf)}, "
f"query_target={str(self.query_target)})"
)
def digest(self):
"""Create SHA256 hash digest of model representation."""
return hashlib.sha256(repr(self).encode()).hexdigest()
def random(self):
"""Create a random string to prevent client or proxy caching."""
return hashlib.sha256(
secrets.token_bytes(8) + repr(self).encode() + secrets.token_bytes(8)
).hexdigest()
@property
def summary(self):
"""Create abbreviated representation of instance."""
items = (
f"query_location={self.query_location}",
f"query_type={self.query_type}",
f"query_vrf={self.query_vrf.name}",
f"query_target={str(self.query_target)}",
)
return f'Query({", ".join(items)})'
@property
def device(self):
"""Get this query's device object by query_location."""
return devices[self.query_location]
@property
def query(self):
"""Get this query's configuration object."""
return params.queries[self.query_type]
def export_dict(self, pretty=False):
"""Create dictionary representation of instance."""
if pretty:
items = {
"query_location": self.device.name,
"query_type": self.query.display_name,
"query_vrf": self.query_vrf.display_name,
"query_target": str(self.query_target),
}
else:
items = {
"query_location": self.query_location,
"query_type": self.query_type,
"query_vrf": self.query_vrf._id,
"query_target": str(self.query_target),
}
return items
def export_json(self):
"""Create JSON representation of instance."""
return json.dumps(self.export_dict(), default=str)
@validator("query_type")
def validate_query_type(cls, value):
"""Ensure query_type is enabled."""
query = params.queries[value]
if not query.enable:
raise InputInvalid(
params.messages.feature_not_enabled,
level="warning",
feature=query.display_name,
)
return value
@validator("query_location")
def validate_query_location(cls, value):
"""Ensure query_location is defined."""
valid_id = value in devices._ids
valid_hostname = value in devices.hostnames
if not any((valid_id, valid_hostname)):
raise InputInvalid(
params.messages.invalid_field,
level="warning",
input=value,
field=params.web.text.query_location,
)
return value
@validator("query_vrf")
def validate_query_vrf(cls, value, values):
"""Ensure query_vrf is defined."""
vrf_object = get_vrf_object(value)
device = devices[values["query_location"]]
device_vrf = None
for vrf in device.vrfs:
if vrf == vrf_object:
device_vrf = vrf
break
if device_vrf is None:
raise InputInvalid(
params.messages.vrf_not_associated,
vrf_name=vrf_object.display_name,
device_name=device.name,
)
return device_vrf
@validator("query_target")
def validate_query_target(cls, value, values):
"""Validate query target value based on query_type."""
query_type = values["query_type"]
value = value.strip()
# Use relevant function based on query_type.
validator_map = {
"bgp_aspath": validate_aspath,
"bgp_community": validate_community_input,
"bgp_route": validate_ip,
"ping": validate_ip,
"traceroute": validate_ip,
}
validator_args_map = {
"bgp_aspath": (value,),
"bgp_community": (value,),
"bgp_route": (value, values["query_type"], values["query_vrf"]),
"ping": (value, values["query_type"], values["query_vrf"]),
"traceroute": (value, values["query_type"], values["query_vrf"]),
}
if params.queries.bgp_community.mode == "select":
validator_map["bgp_community"] = validate_community_select
validate_func = validator_map[query_type]
validate_args = validator_args_map[query_type]
return validate_func(*validate_args)
| 31.5671 | 83 | 0.582693 |
793efcd512267a5499aeeb2b7eff752199610901 | 657 | py | Python | swexpert/d4/sw_4261.py | ruslanlvivsky/python-algorithm | 2b49bed33cd0e95b8a1e758008191f4392b3f667 | [
"MIT"
] | 3 | 2021-07-18T14:40:24.000Z | 2021-08-14T18:08:13.000Z | swexpert/d4/sw_4261.py | jinsuSang/python-algorithm | 524849a0a7e71034d329fef63c4f384930334177 | [
"MIT"
] | null | null | null | swexpert/d4/sw_4261.py | jinsuSang/python-algorithm | 524849a0a7e71034d329fef63c4f384930334177 | [
"MIT"
] | null | null | null | phone = {
'2': 'abc',
'3': 'def',
'4': 'ghi',
'5': 'jkl',
'6': 'mno',
'7': 'pqrs',
'8': 'tuv',
'9': 'wxyz',
}
test_cases = int(input().strip())
for t in range(1, test_cases + 1):
S, N = input().strip().split()
words = input().strip().split()
letters = []
for num in S:
letters.append(phone[num])
result = 0
for word in words:
check = 1
if len(word) != len(letters):
continue
for i in range(len(word)):
if word[i] not in letters[i]:
check = 0
break
result += check
print('#{} {}'.format(t, result))
| 20.53125 | 41 | 0.442922 |
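# Editor's note (illustrative worked example): with S = "26" the letter sets
# are ['abc', 'mno']; for words = ['an', 'bo', 'ad'] the answer is 2, since
# 'an' and 'bo' match position-by-position while 'ad' fails ('d' not in
# 'mno'). Input format assumed from the code: the test count, then per test
# a line "S N" followed by a line of N candidate words.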
793efd55da690ad5a15f46b1a8af1219508408bc | 550 | py | Python | clients/signals.py | MartinMwiti/crm | 2f775b25e293da410dc3a37d0e03d838df1144ac | [
"MIT"
] | null | null | null | clients/signals.py | MartinMwiti/crm | 2f775b25e293da410dc3a37d0e03d838df1144ac | [
"MIT"
] | null | null | null | clients/signals.py | MartinMwiti/crm | 2f775b25e293da410dc3a37d0e03d838df1144ac | [
"MIT"
] | null | null | null | from .models import Purchaser, paymentInvoice
from django.db.models.signals import post_save
from django.dispatch import receiver
# DISCLAIMER: CREATING THIS SIGNAL WILL CAUSE A DUPLICATE IN PAYMENTINVOICE WHENEVER I CREATE AN INSTANCE USING PAYMENTINVOICE. (1st will take the default values as caused by the signal, the 2nd will use the POST request validated_data)
# @receiver(post_save, sender=Purchaser)
# def create_profile(sender, instance, created, **kwargs):
# if created:
# paymentInvoice.objects.create(invoiceOwner=instance)
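# A minimal sketch (not part of the original project) of the usual fix: create the
# invoice in exactly one place. If the signal is kept, the endpoint should update
# the signal-created row rather than create another; get_or_create keeps the signal
# itself idempotent. Names follow the commented-out handler above.
# @receiver(post_save, sender=Purchaser)
# def create_invoice(sender, instance, created, **kwargs):
#     if created:
#         paymentInvoice.objects.get_or_create(invoiceOwner=instance)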
| 50 | 236 | 0.794545 |
793efe1f1443841885349471c9fc05df38d96aaa | 1,676 | py | Python | support/closure-library/closure/bin/build/jscompiler.py | joe-greenawalt/skulpt | 1db078e2f6d453403287233254b012bf31960ef4 | [
"MIT"
] | 2 | 2021-01-10T16:19:38.000Z | 2021-06-14T22:09:59.000Z | support/closure-library/closure/bin/build/jscompiler.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | [
"MIT"
] | null | null | null | support/closure-library/closure/bin/build/jscompiler.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | [
"MIT"
] | 1 | 2015-06-28T18:58:22.000Z | 2015-06-28T18:58:22.000Z | # Copyright 2010 The Closure Library Authors. All Rights Reserved.
"""Utility to use the Closure Compiler CLI from Python."""
import distutils.version
import logging
import re
import subprocess
# Pulls a version number from the first line of 'java -version'
_VERSION_REGEX = re.compile(r'[.0-9]+')
def _GetJavaVersion():
"""Returns the string for the current version of Java installed."""
proc = subprocess.Popen(['java', '-version'], stderr=subprocess.PIPE)
unused_stdoutdata, stderrdata = proc.communicate()
version_line = stderrdata.splitlines()[0]
return _VERSION_REGEX.search(version_line).group()
def Compile(compiler_jar_path, source_paths, flags=None):
"""Prepares command-line call to Closure Compiler.
Args:
compiler_jar_path: Path to the Closure compiler .jar file.
source_paths: Source paths to build, in order.
flags: A list of additional flags to pass on to Closure Compiler.
Returns:
The compiled source, as a string, or None if compilation failed.
"""
# User friendly version check.
if not (distutils.version.LooseVersion(_GetJavaVersion()) >
distutils.version.LooseVersion('1.6')):
logging.error('Closure Compiler requires Java 1.6 or higher. '
'Please visit http://www.java.com/getjava')
return
args = ['java', '-jar', compiler_jar_path]
for path in source_paths:
args += ['--js', path]
if flags:
args += flags
logging.info('Compiling with the following command: %s', ' '.join(args))
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
stdoutdata, unused_stderrdata = proc.communicate()
if proc.returncode != 0:
return
return stdoutdata
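# Example usage (a sketch, not part of the original module; the paths are
# hypothetical, the flags are standard Closure Compiler options). On Python 3 the
# returned stdout is bytes, hence the binary write.
#
#   compiled = Compile('compiler.jar',
#                      ['closure/goog/base.js', 'app.js'],
#                      flags=['--compilation_level', 'SIMPLE_OPTIMIZATIONS'])
#   if compiled is not None:
#     with open('app.min.js', 'wb') as out:
#       out.write(compiled)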
| 28.896552 | 74 | 0.711814 |
793efe5bb53a20282600ca12b3bc6f269ef57be7 | 922 | py | Python | micasa/urls.py | kode-ai/JIRANI | cc31397fa9834da17d8fbba210fd6e29b904b118 | [
"MIT"
] | null | null | null | micasa/urls.py | kode-ai/JIRANI | cc31397fa9834da17d8fbba210fd6e29b904b118 | [
"MIT"
] | 2 | 2021-06-10T22:20:59.000Z | 2021-09-08T01:28:01.000Z | micasa/urls.py | kode-ai/JIRANI | cc31397fa9834da17d8fbba210fd6e29b904b118 | [
"MIT"
] | 1 | 2020-02-27T07:05:21.000Z | 2020-02-27T07:05:21.000Z | from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
url(r'^$', views.home, name='home'),
url(r'^upload/', views.upload_hood, name='upload'),
url(r'^profile/(?P<username>\w+)', views.profile, name='profile'),
url(r'^accounts/update/', views.edit, name='update_profile'),
url(r'^search/', views.search_results, name='search_results'),
url(r'^hood/(?P<hood_id>\d+)', views.hood, name='hood'),
    url(r'^join/(?P<hood_id>\d+)', views.join, name='join'),
url(r'^leave/(?P<hood_id>\d+)', views.leave, name='leave'),
url(r'^upload_business/', views.upload_business, name='upload_business'),
url(r'^post/', views.add_post, name='post'),
]
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 40.086957 | 78 | 0.686551 |
793efe5f4df3a90dcf00bed171a2ca7bf1725f0f | 5,098 | py | Python | kili/queries/dataset_asset/__init__.py | kili-technology/kili-playground | f8cf3c26a3a8fd4076cf2e87ea24a5f919e5aa80 | [
"Apache-2.0"
] | 214 | 2019-08-05T14:55:01.000Z | 2022-03-28T21:02:22.000Z | kili/queries/dataset_asset/__init__.py | x213212/kili-playground | dfb94c2d54bedfd7fec452b91f811587a2156c13 | [
"Apache-2.0"
] | 10 | 2020-05-14T10:44:16.000Z | 2022-03-08T09:39:24.000Z | kili/queries/dataset_asset/__init__.py | x213212/kili-playground | dfb94c2d54bedfd7fec452b91f811587a2156c13 | [
"Apache-2.0"
] | 19 | 2019-11-26T22:41:09.000Z | 2022-01-16T19:17:38.000Z | """
Dataset asset queries
"""
from typing import List, Optional
from typeguard import typechecked
import pandas as pd
from tqdm import tqdm
from ...helpers import Compatible, format_result, fragment_builder
from .queries import gql_assets, GQL_DATASET_ASSETS_COUNT
from ...types import DatasetAsset as DatasetAssetType
from ...orm import Asset
class QueriesDatasetAsset:
"""
Set of DatasetAsset queries
"""
# pylint: disable=too-many-arguments,too-many-locals
def __init__(self, auth):
"""
Initializes the subclass
Parameters
----------
- auth : KiliAuth object
"""
self.auth = auth
# pylint: disable=dangerous-default-value
@Compatible(['v2'])
@typechecked
def dataset_assets(self, asset_id: Optional[str] = None, dataset_id: Optional[str] = None,
skip: int = 0,
fields: list = ['content', 'createdAt', 'externalId', 'id', 'jsonMetadata'],
disable_tqdm: bool = False,
first: Optional[int] = None):
# pylint: disable=line-too-long
"""
Get an array of dataset assets respecting a set of constraints
Parameters
----------
- asset_id : str, optional (default = None)
The unique id of the asset to retrieve.
- dataset_id : str
Identifier of the dataset.
- skip : int, optional (default = None)
Number of assets to skip (they are ordered by their date of creation, first to last).
- fields : list of string, optional (default = ['content', 'createdAt',
'externalId', 'id', 'jsonMetadata'])
All the fields to request among the possible fields for the assets.
See [the documentation](https://cloud.kili-technology.com/docs/python-graphql-api/graphql-api/#datasetasset) for all possible fields.
- first : int, optional (default = None)
Maximum number of assets to return. Can only be between 0 and 100.
        - disable_tqdm : bool, optional (default = False)
            If True, disable the progress bar display.
Returns
-------
- a result object which contains the query if it was successful, or an error message else.
Examples
-------
>>> kili.dataset_assets(dataset_id=dataset_id)
>>> kili.dataset_assets(asset_id=asset_id)
"""
saved_args = locals()
count_args = {k: v for (k, v) in saved_args.items()
if k not in ['skip', 'first', 'fields', 'self', 'disable_tqdm']}
number_of_assets_with_search = self.count_dataset_assets(**count_args)
total = min(number_of_assets_with_search,
first) if first is not None else number_of_assets_with_search
formatted_first = first if first else 100
if total == 0:
return []
with tqdm(total=total, disable=disable_tqdm) as pbar:
paged_assets = []
while True:
variables = {
'where': {
'id': asset_id,
'dataset': {
'id': dataset_id,
},
},
'skip': skip,
'first': formatted_first,
}
_gql_assets = gql_assets(
fragment_builder(fields, DatasetAssetType))
result = self.auth.client.execute(_gql_assets, variables)
assets = format_result('data', result, Asset)
if assets is None or len(assets) == 0 \
or (first is not None and len(paged_assets) == first):
return paged_assets
if first is not None:
assets = assets[:max(0, first - len(paged_assets))]
paged_assets += assets
skip += formatted_first
pbar.update(len(assets))
@Compatible(['v2'])
@typechecked
def count_dataset_assets(
self,
asset_id: Optional[str] = None,
dataset_id: Optional[str] = None):
"""
Count and return the number of assets with the given constraints
Parameters
----------
- asset_id : str, optional (default = None)
The unique id of the asset to retrieve.
- dataset_id : str
Identifier of the dataset.
Returns
-------
- a result object which contains the query if it was successful, or an error message else.
Examples
-------
>>> kili.count_dataset_assets(dataset_id=dataset_id)
23
>>> kili.count_dataset_assets(asset_id=asset_id)
1
"""
variables = {
'where': {
'id': asset_id,
'dataset': {
'id': dataset_id,
},
}
}
result = self.auth.client.execute(GQL_DATASET_ASSETS_COUNT, variables)
count = format_result('data', result)
return count
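    # Usage sketch (assumes an authenticated `kili` client exposing these
    # methods and an existing dataset id; illustrative only):
    #   total = kili.count_dataset_assets(dataset_id=dataset_id)
    #   assets = kili.dataset_assets(dataset_id=dataset_id, first=min(total, 100))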
| 35.402778 | 145 | 0.546685 |
793eff90cce67f3a03ec834b3e723912798d63a2 | 13,071 | py | Python | src/prefect/cli/agent.py | dkapitan/prefect | 9a42c9f48dd9ff50a1e0d7ae2cf7606726bf28bf | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/prefect/cli/agent.py | dkapitan/prefect | 9a42c9f48dd9ff50a1e0d7ae2cf7606726bf28bf | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/prefect/cli/agent.py | dkapitan/prefect | 9a42c9f48dd9ff50a1e0d7ae2cf7606726bf28bf | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | import click
from prefect import config
from prefect.utilities.configuration import set_temporary_config
from prefect.utilities.serialization import from_qualified_name
_agents = {
"fargate": "prefect.agent.fargate.FargateAgent",
"docker": "prefect.agent.docker.DockerAgent",
"kubernetes": "prefect.agent.kubernetes.KubernetesAgent",
"local": "prefect.agent.local.LocalAgent",
"nomad": "prefect.agent.nomad.NomadAgent",
}
@click.group(hidden=True)
def agent():
"""
Manage Prefect agents.
\b
Usage:
$ prefect agent [COMMAND]
\b
Arguments:
start Start a Prefect agent
install Output platform-specific agent installation configs
\b
Examples:
$ prefect agent start
...agent begins running in process...
\b
$ prefect agent start kubernetes --token MY_TOKEN
...agent begins running in process...
\b
$ prefect agent install kubernetes --token MY_TOKEN --namespace metrics
...k8s yaml output...
"""
@agent.command(
hidden=True,
context_settings=dict(ignore_unknown_options=True, allow_extra_args=True),
)
@click.argument("agent-option", default="local")
@click.option(
"--token", "-t", required=False, help="A Prefect Cloud API token.", hidden=True
)
@click.option(
"--name",
"-n",
required=False,
help="A name to use for the agent",
hidden=True,
default=None,
)
@click.option(
"--verbose", "-v", is_flag=True, help="Enable verbose agent logs.", hidden=True
)
@click.option(
"--label",
"-l",
multiple=True,
help="Labels the agent will use to query for flow runs.",
hidden=True,
)
@click.option(
"--env",
"-e",
multiple=True,
help="Environment variables to set on each submitted flow run.",
hidden=True,
)
@click.option(
"--max-polls",
required=False,
help="Maximum number of polls for the agent",
hidden=True,
type=int,
)
@click.option(
"--namespace",
required=False,
help="Kubernetes namespace to create jobs.",
hidden=True,
)
@click.option(
"--import-path",
"-p",
multiple=True,
help="Import paths the local agent will add to all flow runs.",
hidden=True,
)
@click.option(
"--show-flow-logs",
"-f",
help="Display logging output from flows run by the agent.",
hidden=True,
is_flag=True,
)
@click.option("--no-pull", is_flag=True, help="Pull images flag.", hidden=True)
@click.option(
"--no-cloud-logs",
is_flag=True,
help="Turn off logging for all flows run through this agent.",
hidden=True,
)
@click.option("--base-url", "-b", help="Docker daemon base URL.", hidden=True)
@click.option(
"--volume",
multiple=True,
help="Host paths for Docker bind mount volumes attached to each Flow runtime container.",
hidden=True,
)
@click.option(
"--network", help="Add containers to an existing docker network", hidden=True,
)
@click.pass_context
def start(
ctx,
agent_option,
token,
name,
verbose,
label,
env,
namespace,
no_pull,
no_cloud_logs,
base_url,
import_path,
show_flow_logs,
volume,
network,
max_polls,
):
"""
Start an agent.
\b
Arguments:
agent-option TEXT The name of an agent to start (e.g. `docker`, `kubernetes`, `local`, `fargate`, `nomad`)
Defaults to `local`
\b
Options:
--token, -t TEXT A Prefect Cloud API token with RUNNER scope
--name, -n TEXT A name to use for the agent
--verbose, -v Enable verbose agent DEBUG logs
Defaults to INFO level logging
--label, -l TEXT Labels the agent will use to query for flow runs
Multiple values supported e.g. `-l label1 -l label2`
--env, -e TEXT Environment variables to set on each submitted flow run.
Note that equal signs in environment variable values are not currently supported from the CLI.
Multiple values supported e.g. `-e AUTH=token -e PKG_SETTING=true`
--max-polls INT Maximum number of times the agent should poll the Prefect API for flow runs. Will run forever
if not specified.
--no-cloud-logs Turn off logging to the Prefect API for all flow runs
Defaults to `False`
\b
Local Agent Options:
--import-path, -p TEXT Import paths which will be provided to each Flow's runtime environment.
Used for Flows which might import from scripts or local packages.
Multiple values supported e.g. `-p /root/my_scripts -p /utilities`
--show-flow-logs, -f Display logging output from flows run by the agent (available for Local and Docker agents only)
\b
Docker Agent Options:
--base-url, -b TEXT A Docker daemon host URL for a DockerAgent
--no-pull Pull images for a DockerAgent
Defaults to pulling if not provided
--volume TEXT Host paths for Docker bind mount volumes attached to each Flow runtime container.
Multiple values supported e.g. `--volume /some/path --volume /some/other/path`
--network TEXT Add containers to an existing docker network
\b
Kubernetes Agent Options:
--namespace TEXT A Kubernetes namespace to create Prefect jobs in
Defaults to env var `NAMESPACE` or `default`
\b
Fargate Agent Options:
Any of the configuration options outlined in the docs can be provided here
https://docs.prefect.io/orchestration/agents/fargate.html#configuration
"""
# Split context
kwargs = dict()
for item in ctx.args:
item = item.replace("--", "")
kwargs.update([item.split("=")])
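    # e.g. extra args `--cpu=256 --memory=512` (illustrative Fargate options)
    # end up as kwargs == {"cpu": "256", "memory": "512"}.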
tmp_config = {
"cloud.agent.auth_token": token or config.cloud.agent.auth_token,
"logging.log_to_cloud": False if no_cloud_logs else True,
}
if verbose:
tmp_config["cloud.agent.level"] = "DEBUG"
with set_temporary_config(tmp_config):
retrieved_agent = _agents.get(agent_option, None)
if not retrieved_agent:
click.secho("{} is not a valid agent".format(agent_option), fg="red")
return
env_vars = dict()
for env_var in env:
k, v = env_var.split("=")
env_vars[k] = v
if agent_option == "local":
from_qualified_name(retrieved_agent)(
name=name,
labels=list(label),
env_vars=env_vars,
max_polls=max_polls,
import_paths=list(import_path),
show_flow_logs=show_flow_logs,
).start()
elif agent_option == "docker":
from_qualified_name(retrieved_agent)(
name=name,
labels=list(label),
env_vars=env_vars,
max_polls=max_polls,
base_url=base_url,
no_pull=no_pull,
show_flow_logs=show_flow_logs,
volumes=list(volume),
network=network,
).start()
elif agent_option == "fargate":
from_qualified_name(retrieved_agent)(
name=name,
labels=list(label),
env_vars=env_vars,
max_polls=max_polls,
**kwargs
).start()
elif agent_option == "kubernetes":
from_qualified_name(retrieved_agent)(
namespace=namespace,
name=name,
labels=list(label),
env_vars=env_vars,
max_polls=max_polls,
).start()
else:
from_qualified_name(retrieved_agent)(
name=name, labels=list(label), env_vars=env_vars, max_polls=max_polls,
).start()
@agent.command(hidden=True)
@click.argument("name")
@click.option(
"--token", "-t", required=False, help="A Prefect Cloud API token.", hidden=True
)
@click.option("--api", "-a", required=False, help="A Prefect API URL.", hidden=True)
@click.option(
"--namespace",
"-n",
required=False,
help="Agent namespace to launch workloads.",
hidden=True,
)
@click.option(
"--image-pull-secrets",
"-i",
required=False,
help="Name of image pull secrets to use for workloads.",
hidden=True,
)
@click.option(
"--resource-manager", is_flag=True, help="Enable resource manager.", hidden=True
)
@click.option("--rbac", is_flag=True, help="Enable default RBAC.", hidden=True)
@click.option(
"--latest", is_flag=True, help="Use the latest Prefect image.", hidden=True
)
@click.option(
"--mem-request",
required=False,
help="Requested memory for Prefect init job.",
hidden=True,
)
@click.option(
"--mem-limit",
required=False,
help="Limit memory for Prefect init job.",
hidden=True,
)
@click.option(
"--cpu-request",
required=False,
help="Requested CPU for Prefect init job.",
hidden=True,
)
@click.option(
"--cpu-limit", required=False, help="Limit CPU for Prefect init job.", hidden=True
)
@click.option(
"--label",
"-l",
multiple=True,
help="Labels the agent will use to query for flow runs.",
hidden=True,
)
@click.option(
"--import-path",
"-p",
multiple=True,
help="Import paths the local agent will add to all flow runs.",
hidden=True,
)
@click.option(
"--show-flow-logs",
"-f",
help="Display logging output from flows run by the agent.",
hidden=True,
is_flag=True,
)
def install(
name,
token,
api,
namespace,
image_pull_secrets,
resource_manager,
rbac,
latest,
mem_request,
mem_limit,
cpu_request,
cpu_limit,
label,
import_path,
show_flow_logs,
):
"""
Install an agent. Outputs configuration text which can be used to install on various
platforms. The Prefect image version will default to your local `prefect.__version__`
\b
Arguments:
name TEXT The name of an agent to install (e.g. `kubernetes`, `local`)
\b
Options:
--token, -t TEXT A Prefect Cloud API token
--label, -l TEXT Labels the agent will use to query for flow runs
Multiple values supported e.g. `-l label1 -l label2`
\b
Kubernetes Agent Options:
--api, -a TEXT A Prefect API URL
--namespace, -n TEXT Agent namespace to launch workloads
--image-pull-secrets, -i TEXT Name of image pull secrets to use for workloads
--resource-manager Enable resource manager on install
--rbac Enable default RBAC on install
--latest Use the `latest` Prefect image
--mem-request TEXT Requested memory for Prefect init job
--mem-limit TEXT Limit memory for Prefect init job
--cpu-request TEXT Requested CPU for Prefect init job
--cpu-limit TEXT Limit CPU for Prefect init job
\b
Local Agent Options:
--import-path, -p TEXT Absolute import paths to provide to the local agent.
Multiple values supported e.g. `-p /root/my_scripts -p /utilities`
--show-flow-logs, -f Display logging output from flows run by the agent
"""
supported_agents = {
"kubernetes": "prefect.agent.kubernetes.KubernetesAgent",
"local": "prefect.agent.local.LocalAgent",
}
retrieved_agent = supported_agents.get(name, None)
if not retrieved_agent:
click.secho("{} is not a supported agent for `install`".format(name), fg="red")
return
if name == "kubernetes":
deployment = from_qualified_name(retrieved_agent).generate_deployment_yaml(
token=token,
api=api,
namespace=namespace,
image_pull_secrets=image_pull_secrets,
resource_manager_enabled=resource_manager,
rbac=rbac,
latest=latest,
mem_request=mem_request,
mem_limit=mem_limit,
cpu_request=cpu_request,
cpu_limit=cpu_limit,
labels=list(label),
)
click.echo(deployment)
elif name == "local":
conf = from_qualified_name(retrieved_agent).generate_supervisor_conf(
token=token,
labels=list(label),
import_paths=list(import_path),
show_flow_logs=show_flow_logs,
)
click.echo(conf)
| 31.496386 | 131 | 0.586566 |
793effa96e8e809d1d36e681c0a7abcce6183de2 | 1,485 | py | Python | tests/test_ogcapi_features_pygeoapi.py | beistehen/OWSLib | 9a92b1c86c4ae551fdb015f647e6a34ce69cdf9e | [
"BSD-3-Clause"
] | null | null | null | tests/test_ogcapi_features_pygeoapi.py | beistehen/OWSLib | 9a92b1c86c4ae551fdb015f647e6a34ce69cdf9e | [
"BSD-3-Clause"
] | null | null | null | tests/test_ogcapi_features_pygeoapi.py | beistehen/OWSLib | 9a92b1c86c4ae551fdb015f647e6a34ce69cdf9e | [
"BSD-3-Clause"
] | null | null | null | from tests.utils import service_ok
import pytest
from owslib.ogcapi.features import Features
SERVICE_URL = 'https://demo.pygeoapi.io/master'
@pytest.mark.online
@pytest.mark.skipif(not service_ok(SERVICE_URL),
reason='service is unreachable')
def test_ogcapi_features_pygeoapi():
w = Features(SERVICE_URL)
assert w.url == 'https://demo.pygeoapi.io/master/'
assert w.url_query_string is None
api = w.api()
assert api['components']['parameters'] is not None
paths = api['paths']
assert paths is not None
assert paths['/collections/lakes'] is not None
conformance = w.conformance()
assert len(conformance['conformsTo']) == 17
collections = w.collections()
assert len(collections) > 0
feature_collections = w.feature_collections()
assert len(feature_collections) > 0
lakes = w.collection('lakes')
assert lakes['id'] == 'lakes'
assert lakes['title'] == 'Large Lakes'
assert lakes['description'] == 'lakes of the world, public domain'
#lakes_queryables = w.collection_queryables('lakes')
#assert len(lakes_queryables['queryables']) == 6
# Minimum of limit param is 1
with pytest.raises(RuntimeError):
lakes_query = w.collection_items('lakes', limit=0)
lakes_query = w.collection_items('lakes', limit=1, admin='admin-0')
assert lakes_query['numberMatched'] == 25
assert lakes_query['numberReturned'] == 1
assert len(lakes_query['features']) == 1
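    # A possible next step (illustrative, not asserted here): page past the first
    # result. Depending on the pygeoapi version the paging parameter is `offset`
    # or `startindex`; OWSLib forwards extra keyword arguments as query parameters.
    #   lakes_page_2 = w.collection_items('lakes', limit=1, offset=1)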
| 29.7 | 71 | 0.686869 |
793f0015c49e5396a18a32ecc81f359e51561c19 | 1,055 | py | Python | hw1/hw1_18.py | allen860614/NTU_ML | 15373e39ea3fe02d48abffd7dbd2ac2f1dacea55 | [
"MIT"
] | null | null | null | hw1/hw1_18.py | allen860614/NTU_ML | 15373e39ea3fe02d48abffd7dbd2ac2f1dacea55 | [
"MIT"
] | null | null | null | hw1/hw1_18.py | allen860614/NTU_ML | 15373e39ea3fe02d48abffd7dbd2ac2f1dacea55 | [
"MIT"
] | null | null | null | import numpy as np
import random
# load data set
data = np.loadtxt("hw1_train.dat.txt")
# set sign()
def sign(x):
if x > 0:
return 1
else:
return -1
result = []
def PLA(): # PLA algorithm
    num = 11 # the vector length (10 features plus the bias term)
    end = 0 # number of consecutive correct random checks (loop stops at 5N)
    count = 0 # record the number of updates
    i = 0 # point to the current data
    w = np.zeros(num) # weight vector (NumPy array so the += update stays elementwise)
    N = 100 # total data number
    x = num*[10.0] # x vector; x[0] stays 10.0 as the bias term, the rest is overwritten per sample
while end < 5*N:
i = random.randint(0, N-1)
x[1:num] = data[i][0:num-1] # replace vector x with data
if sign(np.dot(w, x)) != data[i][-1]: # find mistake
y = num*[data[i][-1]]
w += np.multiply(y, x) # update w to correct mistake
end = 0
count = count + 1
else:
end = end + 1
return count
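# PLA update rule used above: for a misclassified sample (x, y) with y in {+1, -1},
# w <- w + y * x, which tilts the separating hyperplane toward classifying x correctly.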
for j in range(0, 1000):
result.append(PLA())
print(np.median(result))
| 24.534884 | 81 | 0.495735 |
793f00363cdfcd69db009c5993d0f43ee842a94d | 9,536 | py | Python | demo:v1/gesture_model.py | Acemyzoe/gesture-classifier | 3f83ff86e61c6f99ef762c2e146ab326c2f86e2d | [
"MIT"
] | null | null | null | demo:v1/gesture_model.py | Acemyzoe/gesture-classifier | 3f83ff86e61c6f99ef762c2e146ab326c2f86e2d | [
"MIT"
] | null | null | null | demo:v1/gesture_model.py | Acemyzoe/gesture-classifier | 3f83ff86e61c6f99ef762c2e146ab326c2f86e2d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.utils import to_categorical
import numpy as np
import os
import cv2
from PIL import Image
from matplotlib import pyplot as plt
# SKLEARN
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split
# input image dimensions
img_rows, img_cols = 200, 200
# For grayscale use 1 value and for color images use 3 (R,G,B channels)
img_channels = 1
batch_size = 32
nb_classes = 5
nb_epoch = 10 #25
## path2 is the folder that is fed into the training model
path2 = './gesture-data'
gesture = ["OK", "NOTHING","PEACE", "PUNCH", "STOP"]
# This function can be used for converting colored img to Grayscale img
# while copying images from path1 to path2
def convertToGrayImg(path1, path2):
for dirname, _, filenames in os.walk(path1):
for filename in filenames:
path = os.path.join(dirname, filename)
print(os.path.join(dirname, filename))
if path.endswith("png"):
img = cv2.imread(path)
                # skin-mask mode processing
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
#Apply skin color range
low_range = np.array([0, 50, 80])
upper_range = np.array([30, 200, 255])
skinkernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(5,5))
mask = cv2.inRange(hsv, low_range, upper_range)
mask = cv2.erode(mask, skinkernel, iterations = 1)
mask = cv2.dilate(mask, skinkernel, iterations = 1)
#blur
mask = cv2.GaussianBlur(mask, (15,15), 1)
#cv2.imshow("Blur", mask)
#bitwise and mask original frame
res = cv2.bitwise_and(img, img, mask = mask)
# color to grayscale
img = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
cv2.imwrite(path2+'/'+filename,img)
def modlistdir(path, pattern = None):
listing = os.listdir(path)
retlist = []
for name in listing:
#This check is to ignore any hidden files/folders
if pattern == None:
if name.startswith('.'):
continue
else:
retlist.append(name)
elif name.endswith(pattern):
retlist.append(name)
return retlist
# init picture
def initializers():
imlist = modlistdir(path2)
image1 = np.array(Image.open(path2 +'/' + imlist[0])) # open one image to get size
    #plt.imshow(image1)
m,n = image1.shape[0:2] # get the size of the images
total_images = len(imlist) # get the 'total' number of images
# create matrix to store all flattened images
immatrix = np.array([np.array(Image.open(path2+ '/' + images).convert('L')).flatten()
for images in sorted(imlist)], dtype = 'f')
print(immatrix.shape)
## Label the set of images per respective gesture type.
label=np.ones((total_images,),dtype = int)
samples_per_class = int(total_images / nb_classes)
print("samples_per_class - ",samples_per_class)
s = 0
r = samples_per_class
for classIndex in range(nb_classes):
label[s:r] = classIndex
s = r
r = s + samples_per_class
data,Label = shuffle(immatrix,label, random_state=2)
train_data = [data,Label]
(X, y) = (train_data[0],train_data[1])
# Split X and y into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=4)
X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, img_channels)
X_test = X_test.reshape(X_test.shape[0], img_rows, img_cols, img_channels)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
# normalize
X_train /= 255
X_test /= 255
# convert class vectors to binary class matrices
    Y_train = to_categorical(y_train, nb_classes)
    Y_test = to_categorical(y_test, nb_classes)
return X_train, X_test, Y_train, Y_test
# Load CNN model
def CNN(bTraining = True):
model = Sequential()
model.add(Conv2D(32, (3, 3),
padding='same',
input_shape=(img_rows, img_cols,img_channels)))
model.add(Activation('relu'))
model.add(Conv2D(32, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.5))
model.add(Flatten())
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
    # Model config details
model.get_config()
if not bTraining :
model.load_weights('./gesture_weight.h5')
return model
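# Usage sketch: CNN() returns a fresh model for trainmodel(), while CNN(False)
# also loads pretrained weights and assumes './gesture_weight.h5' exists.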
def trainmodel(model):
# Split X and y into training and testing sets
X_train, X_test, Y_train, Y_test = initializers()
# Now start the training of the loaded model
hist = model.fit(X_train, Y_train, batch_size=batch_size, epochs=nb_epoch,
verbose=1, validation_split=0.2)
visualizeHis(hist)
ans = input("Do you want to save the trained weights - y/n ?")
if ans == 'y':
filename = input("Enter file name - ")
fname = str(filename) + ".h5"
model.save_weights(fname,overwrite=True)
else:
model.save("newmodel.h5",overwrite=True)
def visualizeHis(hist):
# visualizing losses and accuracy
train_loss=hist.history['loss']
val_loss=hist.history['val_loss']
train_acc=hist.history['accuracy']
val_acc=hist.history['val_accuracy']
xc=range(nb_epoch)
plt.figure(1,figsize=(7,5))
plt.plot(xc,train_loss)
plt.plot(xc,val_loss)
plt.xlabel('num of Epochs')
plt.ylabel('loss')
plt.title('train_loss vs val_loss')
plt.grid(True)
plt.legend(['train','val'])
plt.figure(2,figsize=(7,5))
plt.plot(xc,train_acc)
plt.plot(xc,val_acc)
plt.xlabel('num of Epochs')
plt.ylabel('accuracy')
plt.title('train_acc vs val_acc')
plt.grid(True)
plt.legend(['train','val'],loc=4)
plt.show()
def analysis(model,path):
img = cv2.imread(path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img = cv2.resize(img, (img_rows,img_cols))
x = np.expand_dims(img, axis = 0)
x = x.reshape(1,img_rows,img_cols,img_channels)
prediction = model.predict(x)
print(prediction[0])
#draw chart
y_pos = np.arange(len(gesture))
plt.bar(y_pos, prediction[0], align='center', alpha=1)
plt.xticks(y_pos, gesture)
plt.ylabel('percentage')
plt.title('gesture')
plt.show()
def capture(model):
cap = cv2.VideoCapture(0)
while(True):
        ret, frame = cap.read()
        frame = cv2.bilateralFilter(frame, 5, 50, 100) # bilateral filter: smooth noise while preserving edges
        frame = cv2.flip(frame, 1) # flip code: 0 = around X axis (vertical flip), >0 = around Y axis (horizontal flip), <0 = both axes (equivalent to a 180-degree rotation)
cv2.rectangle(frame, (int(0.6 * frame.shape[1]), 0),(frame.shape[1], int(0.4 * frame.shape[0])), (0, 0, 255), 2)
        img = frame[0:int(0.4 * frame.shape[0]),int(0.6 * frame.shape[1]):frame.shape[1]] # crop the top-right rectangle region
modes = 'B'
if modes == 'B':
            # binary-threshold mode processing
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # convert the image to grayscale
            blur = cv2.GaussianBlur(gray, (5, 5), 2) # apply Gaussian blur
            th3 = cv2.adaptiveThreshold(blur,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY_INV,11,2)
            ret, img = cv2.threshold(th3, 60, 255,cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU) # binarize (inverted binary + Otsu threshold)
else:
            # skin-mask mode processing
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
#Apply skin color range
low_range = np.array([0, 50, 80])
upper_range = np.array([30, 200, 255])
skinkernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(5,5))
mask = cv2.inRange(hsv, low_range, upper_range)
mask = cv2.erode(mask, skinkernel, iterations = 1)
mask = cv2.dilate(mask, skinkernel, iterations = 1)
#blur
mask = cv2.GaussianBlur(mask, (15,15), 1)
#cv2.imshow("Blur", mask)
#bitwise and mask original frame
res = cv2.bitwise_and(img, img, mask = mask)
# color to grayscale
img = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
img = cv2.resize(img,(img_rows,img_cols))
x = np.expand_dims(img, axis = 0)
x = x.reshape(1,img_rows,img_cols,img_channels)
prediction = model.predict(x)
cv2.rectangle(frame,(10,12),(160,160),(64,64,64),cv2.FILLED)
cv2.addWeighted(frame.copy(), 0.4, frame, 0.6, 0, frame)
ges = ""
for i in range(len(prediction[0])):
ges = "%s: %s%s" %(gesture[i],round(prediction[0][i]*100, 2),'%')
cv2.putText(frame, ges,(10,20+15*i),cv2.FONT_HERSHEY_SIMPLEX,0.5,(255,255,255),1)
cv2.imshow('original', frame)
cv2.imshow('img',img)
if cv2.waitKey(1) == 27:
break
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
mymodel = CNN(False)
#trainmodel(mymodel)
#analysis(mymodel,'./imgs/ssstop1.png')
capture(mymodel)
| 35.715356 | 120 | 0.616296 |
793f0261fa01d995b678c5972fcb20e3bb691eb4 | 204,903 | py | Python | wagtail-repository/wagtail/admin/tests/test_pages_views.py | TobiasSkovgaardJepsen/wagtail-on-heroku | 17e4720f86023225e0704890688998a80bb87a17 | [
"BSD-3-Clause"
] | null | null | null | wagtail-repository/wagtail/admin/tests/test_pages_views.py | TobiasSkovgaardJepsen/wagtail-on-heroku | 17e4720f86023225e0704890688998a80bb87a17 | [
"BSD-3-Clause"
] | 4 | 2020-06-05T17:00:01.000Z | 2021-06-17T20:15:01.000Z | wagtail-repository/wagtail/admin/tests/test_pages_views.py | TobiasSkovgaardJepsen/wagtail-on-heroku | 17e4720f86023225e0704890688998a80bb87a17 | [
"BSD-3-Clause"
] | 1 | 2019-04-16T14:14:55.000Z | 2019-04-16T14:14:55.000Z | import datetime
import logging
import os
from itertools import chain
import mock
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.messages import constants as message_constants
from django.core import mail, paginator
from django.core.files.base import ContentFile
from django.db.models.signals import post_delete, pre_delete
from django.http import HttpRequest, HttpResponse
from django.test import TestCase, modify_settings, override_settings
from django.urls import reverse
from django.utils import formats, timezone
from django.utils.dateparse import parse_date
from freezegun import freeze_time
from wagtail.tests.testapp.models import (
EVENT_AUDIENCE_CHOICES, Advert, AdvertPlacement, BusinessChild, BusinessIndex, BusinessSubIndex,
DefaultStreamPage, EventCategory, EventPage, EventPageCarouselItem, FilePage,
ManyToManyBlogPage, SimplePage, SingleEventPage, SingletonPage, StandardChild, StandardIndex,
TaggedPage)
from wagtail.tests.utils import WagtailTestUtils
from wagtail.admin.views.home import RecentEditsPanel
from wagtail.admin.views.pages import PreviewOnEdit
from wagtail.core.models import GroupPagePermission, Page, PageRevision, Site
from wagtail.core.signals import page_published, page_unpublished
from wagtail.search.index import SearchField
from wagtail.users.models import UserProfile
def submittable_timestamp(timestamp):
"""
Helper function to translate a possibly-timezone-aware datetime into the format used in the
go_live_at / expire_at form fields - "YYYY-MM-DD hh:mm", with no timezone indicator.
This will be interpreted as being in the server's timezone (settings.TIME_ZONE), so we
need to pass it through timezone.localtime to ensure that the client and server are in
agreement about what the timestamp means.
"""
return timezone.localtime(timestamp).strftime("%Y-%m-%d %H:%M")
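# e.g. with TIME_ZONE = "America/Chicago" (UTC-6 in winter), an aware input of
# 2017-01-01 12:00 UTC is submitted as "2017-01-01 06:00" (illustrative values).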
def local_datetime(*args):
dt = datetime.datetime(*args)
return timezone.make_aware(dt)
class TestPageExplorer(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add child page
self.child_page = SimplePage(
title="Hello world!",
slug="hello-world",
content="hello",
)
self.root_page.add_child(instance=self.child_page)
# more child pages to test ordering
self.old_page = StandardIndex(
title="Old page",
slug="old-page",
latest_revision_created_at=local_datetime(2010, 1, 1)
)
self.root_page.add_child(instance=self.old_page)
self.new_page = SimplePage(
title="New page",
slug="new-page",
content="hello",
latest_revision_created_at=local_datetime(2016, 1, 1)
)
self.root_page.add_child(instance=self.new_page)
# Login
self.user = self.login()
def test_explore(self):
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(self.root_page, response.context['parent_page'])
# child pages should be most recent first
# (with null latest_revision_created_at at the end)
page_ids = [page.id for page in response.context['pages']]
self.assertEqual(page_ids, [self.new_page.id, self.old_page.id, self.child_page.id])
def test_explore_root(self):
response = self.client.get(reverse('wagtailadmin_explore_root'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(Page.objects.get(id=1), response.context['parent_page'])
self.assertTrue(response.context['pages'].paginator.object_list.filter(id=self.root_page.id).exists())
def test_explore_root_shows_icon(self):
response = self.client.get(reverse('wagtailadmin_explore_root'))
self.assertEqual(response.status_code, 200)
# Administrator (or user with add_site permission) should see the
# sites link with the icon-site icon
self.assertContains(
response,
("""<a href="/admin/sites/" class="icon icon-site" """
"""title="Sites menu"></a>""")
)
def test_ordering(self):
response = self.client.get(
reverse('wagtailadmin_explore', args=(self.root_page.id, )),
{'ordering': 'title'}
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], 'title')
# child pages should be ordered by title
page_ids = [page.id for page in response.context['pages']]
self.assertEqual(page_ids, [self.child_page.id, self.new_page.id, self.old_page.id])
def test_reverse_ordering(self):
response = self.client.get(
reverse('wagtailadmin_explore', args=(self.root_page.id, )),
{'ordering': '-title'}
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], '-title')
# child pages should be ordered by title
page_ids = [page.id for page in response.context['pages']]
self.assertEqual(page_ids, [self.old_page.id, self.new_page.id, self.child_page.id])
def test_ordering_by_last_revision_forward(self):
response = self.client.get(
reverse('wagtailadmin_explore', args=(self.root_page.id, )),
{'ordering': 'latest_revision_created_at'}
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], 'latest_revision_created_at')
# child pages should be oldest revision first
# (with null latest_revision_created_at at the start)
page_ids = [page.id for page in response.context['pages']]
self.assertEqual(page_ids, [self.child_page.id, self.old_page.id, self.new_page.id])
def test_invalid_ordering(self):
response = self.client.get(
reverse('wagtailadmin_explore', args=(self.root_page.id, )),
{'ordering': 'invalid_order'}
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], '-latest_revision_created_at')
def test_reordering(self):
response = self.client.get(
reverse('wagtailadmin_explore', args=(self.root_page.id, )),
{'ordering': 'ord'}
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], 'ord')
# child pages should be ordered by native tree order (i.e. by creation time)
page_ids = [page.id for page in response.context['pages']]
self.assertEqual(page_ids, [self.child_page.id, self.old_page.id, self.new_page.id])
# Pages must not be paginated
self.assertNotIsInstance(response.context['pages'], paginator.Page)
def test_construct_explorer_page_queryset_hook(self):
# testapp implements a construct_explorer_page_queryset hook
# that only returns pages with a slug starting with 'hello'
# when the 'polite_pages_only' URL parameter is set
response = self.client.get(
reverse('wagtailadmin_explore', args=(self.root_page.id, )),
{'polite_pages_only': 'yes_please'}
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
page_ids = [page.id for page in response.context['pages']]
self.assertEqual(page_ids, [self.child_page.id])
def make_pages(self):
for i in range(150):
self.root_page.add_child(instance=SimplePage(
title="Page " + str(i),
slug="page-" + str(i),
content="hello",
))
def test_pagination(self):
self.make_pages()
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
# Check that we got the correct page
self.assertEqual(response.context['pages'].number, 2)
def test_pagination_invalid(self):
self.make_pages()
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )), {'p': 'Hello World!'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
# Check that we got page one
self.assertEqual(response.context['pages'].number, 1)
def test_pagination_out_of_range(self):
self.make_pages()
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
# Check that we got the last page
self.assertEqual(response.context['pages'].number, response.context['pages'].paginator.num_pages)
def test_listing_uses_specific_models(self):
# SingleEventPage has custom URL routing; the 'live' link in the listing
# should show the custom URL, which requires us to use the specific version
# of the class
self.new_event = SingleEventPage(
title="New event",
location='the moon', audience='public',
cost='free', date_from='2001-01-01',
latest_revision_created_at=local_datetime(2016, 1, 1)
)
self.root_page.add_child(instance=self.new_event)
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '/new-event/pointless-suffix/')
def test_listing_uses_admin_display_title(self):
# SingleEventPage has a custom get_admin_display_title method; explorer should
# show the custom title rather than the basic database one
self.new_event = SingleEventPage(
title="New event",
location='the moon', audience='public',
cost='free', date_from='2001-01-01',
latest_revision_created_at=local_datetime(2016, 1, 1)
)
self.root_page.add_child(instance=self.new_event)
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )))
self.assertContains(response, 'New event (single event)')
response = self.client.get(reverse('wagtailadmin_explore', args=(self.new_event.id, )))
self.assertContains(response, 'New event (single event)')
def test_parent_page_is_specific(self):
response = self.client.get(reverse('wagtailadmin_explore', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertIsInstance(response.context['parent_page'], SimplePage)
def test_explorer_no_perms(self):
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
admin = reverse('wagtailadmin_home')
self.assertRedirects(
self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, ))),
admin)
self.assertRedirects(
self.client.get(reverse('wagtailadmin_explore_root')), admin)
def test_explore_with_missing_page_model(self):
# Create a ContentType that doesn't correspond to a real model
missing_page_content_type = ContentType.objects.create(app_label='tests', model='missingpage')
# Turn /home/old-page/ into this content type
Page.objects.filter(id=self.old_page.id).update(content_type=missing_page_content_type)
        # try to browse the listing that contains the missing model
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
# try to browse into the page itself
response = self.client.get(reverse('wagtailadmin_explore', args=(self.old_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
class TestPageExplorerSignposting(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=1)
# Find page with an associated site
self.site_page = Page.objects.get(id=2)
# Add another top-level page (which will have no corresponding site record)
self.no_site_page = SimplePage(
title="Hello world!",
slug="hello-world",
content="hello",
)
self.root_page.add_child(instance=self.no_site_page)
def test_admin_at_root(self):
self.assertTrue(self.client.login(username='superuser', password='password'))
response = self.client.get(reverse('wagtailadmin_explore_root'))
self.assertEqual(response.status_code, 200)
# Administrator (or user with add_site permission) should get the full message
# about configuring sites
self.assertContains(
response,
(
"The root level is where you can add new sites to your Wagtail installation. "
"Pages created here will not be accessible at any URL until they are associated with a site."
)
)
self.assertContains(response, """<a href="/admin/sites/">Configure a site now.</a>""")
def test_admin_at_non_site_page(self):
self.assertTrue(self.client.login(username='superuser', password='password'))
response = self.client.get(reverse('wagtailadmin_explore', args=(self.no_site_page.id, )))
self.assertEqual(response.status_code, 200)
# Administrator (or user with add_site permission) should get a warning about
# unroutable pages, and be directed to the site config area
self.assertContains(
response,
(
"There is no site set up for this location. "
"Pages created here will not be accessible at any URL until a site is associated with this location."
)
)
self.assertContains(response, """<a href="/admin/sites/">Configure a site now.</a>""")
def test_admin_at_site_page(self):
self.assertTrue(self.client.login(username='superuser', password='password'))
response = self.client.get(reverse('wagtailadmin_explore', args=(self.site_page.id, )))
self.assertEqual(response.status_code, 200)
# There should be no warning message here
self.assertNotContains(response, "Pages created here will not be accessible")
def test_nonadmin_at_root(self):
self.assertTrue(self.client.login(username='siteeditor', password='password'))
response = self.client.get(reverse('wagtailadmin_explore_root'))
self.assertEqual(response.status_code, 200)
# Non-admin should get a simple "create pages as children of the homepage" prompt
self.assertContains(
response,
"Pages created here will not be accessible at any URL. "
"To add pages to an existing site, create them as children of the homepage."
)
def test_nonadmin_at_non_site_page(self):
self.assertTrue(self.client.login(username='siteeditor', password='password'))
response = self.client.get(reverse('wagtailadmin_explore', args=(self.no_site_page.id, )))
self.assertEqual(response.status_code, 200)
# Non-admin should get a warning about unroutable pages
self.assertContains(
response,
(
"There is no site record for this location. "
"Pages created here will not be accessible at any URL."
)
)
def test_nonadmin_at_site_page(self):
self.assertTrue(self.client.login(username='siteeditor', password='password'))
response = self.client.get(reverse('wagtailadmin_explore', args=(self.site_page.id, )))
self.assertEqual(response.status_code, 200)
# There should be no warning message here
self.assertNotContains(response, "Pages created here will not be accessible")
class TestExplorablePageVisibility(TestCase, WagtailTestUtils):
"""
Test the way that the Explorable Pages functionality manifests within the Explorer.
This is isolated in its own test case because it requires a custom page tree and custom set of
users and groups.
The fixture sets up this page tree:
========================================================
ID Site Path
========================================================
1 /
2 testserver /home/
3 testserver /home/about-us/
4 example.com /example-home/
5 example.com /example-home/content/
6 example.com /example-home/content/page-1/
7 example.com /example-home/content/page-2/
9 example.com /example-home/content/page-2/child-1
8 example.com /example-home/other-content/
10 example2.com /home-2/
========================================================
Group 1 has explore and choose permissions rooted at testserver's homepage.
Group 2 has explore and choose permissions rooted at example.com's page-1.
Group 3 has explore and choose permissions rooted at example.com's other-content.
User "jane" is in Group 1.
User "bob" is in Group 2.
User "sam" is in Groups 1 and 2.
User "josh" is in Groups 2 and 3.
User "mary" is is no Groups, but she has the "access wagtail admin" permission.
User "superman" is an admin.
"""
fixtures = ['test_explorable_pages.json']
# Integration tests adapted from @coredumperror
def test_admin_can_explore_every_page(self):
self.assertTrue(self.client.login(username='superman', password='password'))
for page in Page.objects.all():
response = self.client.get(reverse('wagtailadmin_explore', args=[page.pk]))
self.assertEqual(response.status_code, 200)
def test_admin_sees_root_page_as_explorer_root(self):
self.assertTrue(self.client.login(username='superman', password='password'))
response = self.client.get(reverse('wagtailadmin_explore_root'))
self.assertEqual(response.status_code, 200)
# Administrator should see the full list of children of the Root page.
self.assertContains(response, "Welcome to testserver!")
self.assertContains(response, "Welcome to example.com!")
def test_admin_sees_breadcrumbs_up_to_root_page(self):
self.assertTrue(self.client.login(username='superman', password='password'))
response = self.client.get(reverse('wagtailadmin_explore', args=[6]))
self.assertEqual(response.status_code, 200)
self.assertInHTML(
"""<li class="home"><a href="/admin/pages/" class="icon icon-site text-replace">Root</a></li>""",
str(response.content)
)
self.assertInHTML("""<li><a href="/admin/pages/4/">Welcome to example.com!</a></li>""", str(response.content))
self.assertInHTML("""<li><a href="/admin/pages/5/">Content</a></li>""", str(response.content))
def test_nonadmin_sees_breadcrumbs_up_to_cca(self):
self.assertTrue(self.client.login(username='josh', password='password'))
response = self.client.get(reverse('wagtailadmin_explore', args=[6]))
self.assertEqual(response.status_code, 200)
# While at "Page 1", Josh should see the breadcrumbs leading only as far back as the example.com homepage,
# since it's his Closest Common Ancestor.
self.assertInHTML(
"""<li class="home"><a href="/admin/pages/4/" class="icon icon-home text-replace">Home</a></li>""",
str(response.content)
)
self.assertInHTML("""<li><a href="/admin/pages/5/">Content</a></li>""", str(response.content))
# The page title shouldn't appear because it's the "home" breadcrumb.
self.assertNotContains(response, "Welcome to example.com!")
def test_admin_home_page_changes_with_permissions(self):
self.assertTrue(self.client.login(username='bob', password='password'))
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
# Bob should only see the welcome for example.com, not testserver
self.assertContains(response, "Welcome to the example.com Wagtail CMS")
self.assertNotContains(response, "testserver")
def test_breadcrumb_with_no_user_permissions(self):
self.assertTrue(self.client.login(username='mary', password='password'))
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
# Since Mary has no page permissions, she should not see the breadcrumb
self.assertNotContains(response, """<li class="home"><a href="/admin/pages/4/" class="icon icon-home text-replace">Home</a></li>""")
class TestPageCreation(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Login
self.user = self.login()
def test_add_subpage(self):
response = self.client.get(reverse('wagtailadmin_pages:add_subpage', args=(self.root_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Simple page")
target_url = reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id))
self.assertContains(response, 'href="%s"' % target_url)
# List of available page types should not contain pages with is_creatable = False
self.assertNotContains(response, "MTI base page")
# List of available page types should not contain abstract pages
self.assertNotContains(response, "Abstract page")
# List of available page types should not contain pages whose parent_page_types forbid it
self.assertNotContains(response, "Business child")
def test_add_subpage_with_subpage_types(self):
# Add a BusinessIndex to test business rules in
business_index = BusinessIndex(
title="Hello world!",
slug="hello-world",
)
self.root_page.add_child(instance=business_index)
response = self.client.get(reverse('wagtailadmin_pages:add_subpage', args=(business_index.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Business child")
# List should not contain page types not in the subpage_types list
self.assertNotContains(response, "Simple page")
def test_add_subpage_with_one_valid_subpage_type(self):
# Add a BusinessSubIndex to test business rules in
business_index = BusinessIndex(
title="Hello world!",
slug="hello-world",
)
self.root_page.add_child(instance=business_index)
business_subindex = BusinessSubIndex(
title="Hello world!",
slug="hello-world",
)
business_index.add_child(instance=business_subindex)
response = self.client.get(reverse('wagtailadmin_pages:add_subpage', args=(business_subindex.id, )))
# Should be redirected to the 'add' page for BusinessChild, the only valid subpage type
self.assertRedirects(
response,
reverse('wagtailadmin_pages:add', args=('tests', 'businesschild', business_subindex.id))
)
def test_add_subpage_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get add subpage page
response = self.client.get(reverse('wagtailadmin_pages:add_subpage', args=(self.root_page.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
    def test_add_subpage_nonexistent_parent(self):
response = self.client.get(reverse('wagtailadmin_pages:add_subpage', args=(100000, )))
self.assertEqual(response.status_code, 404)
def test_add_subpage_with_next_param(self):
response = self.client.get(
reverse('wagtailadmin_pages:add_subpage', args=(self.root_page.id, )),
{'next': '/admin/users/'}
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Simple page")
target_url = reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id))
self.assertContains(response, 'href="%s?next=/admin/users/"' % target_url)
def test_create_simplepage(self):
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<a href="#tab-content" class="active">Content</a>')
self.assertContains(response, '<a href="#tab-promote" class="">Promote</a>')
def test_create_multipart(self):
"""
        Test that 'enctype="multipart/form-data"' is added, and only to forms that require multipart encoding.
"""
# check for SimplePage where is no file field
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'enctype="multipart/form-data"')
self.assertTemplateUsed(response, 'wagtailadmin/pages/create.html')
# check for FilePage which has file field
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'filepage', self.root_page.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'enctype="multipart/form-data"')
def test_create_page_without_promote_tab(self):
"""
Test that the Promote tab is not rendered for page classes that define it as empty
"""
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'standardindex', self.root_page.id))
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<a href="#tab-content" class="active">Content</a>')
self.assertNotContains(response, '<a href="#tab-promote" class="">Promote</a>')
def test_create_page_with_custom_tabs(self):
"""
Test that custom edit handlers are rendered
"""
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'standardchild', self.root_page.id))
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<a href="#tab-content" class="active">Content</a>')
self.assertContains(response, '<a href="#tab-promote" class="">Promote</a>')
self.assertContains(response, '<a href="#tab-dinosaurs" class="">Dinosaurs</a>')
def test_create_simplepage_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get page
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_cannot_create_page_with_is_creatable_false(self):
# tests.MTIBasePage has is_creatable=False, so attempting to add a new one
# should fail with permission denied
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'mtibasepage', self.root_page.id))
)
self.assertEqual(response.status_code, 403)
def test_cannot_create_page_when_can_create_at_returns_false(self):
# issue #2892
# Check that creating a second SingletonPage results in a permission
# denied error.
# SingletonPage overrides the can_create_at method to make it return
# False if another SingletonPage already exists.
add_url = reverse('wagtailadmin_pages:add', args=[
SingletonPage._meta.app_label, SingletonPage._meta.model_name, self.root_page.pk])
# A single singleton page should be creatable
self.assertTrue(SingletonPage.can_create_at(self.root_page))
response = self.client.get(add_url)
self.assertEqual(response.status_code, 200)
# Create a singleton page
self.root_page.add_child(instance=SingletonPage(
title='singleton', slug='singleton'))
# A second singleton page should not be creatable
self.assertFalse(SingletonPage.can_create_at(self.root_page))
response = self.client.get(add_url)
self.assertEqual(response.status_code, 403)
def test_cannot_create_page_with_wrong_parent_page_types(self):
# tests.BusinessChild has limited parent_page_types, so attempting to add
# a new one at the root level should fail with permission denied
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'businesschild', self.root_page.id))
)
self.assertEqual(response.status_code, 403)
def test_cannot_create_page_with_wrong_subpage_types(self):
# Add a BusinessIndex to test subpage business rules against
business_index = BusinessIndex(
title="Hello world!",
slug="hello-world",
)
self.root_page.add_child(instance=business_index)
# BusinessIndex has limited subpage_types, so attempting to add a SimplePage
# underneath it should fail with permission denied
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', business_index.id))
)
self.assertEqual(response.status_code, 403)
def test_create_simplepage_post(self):
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)),
post_data
)
# Find the page and check it
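# treebeard stores materialized paths, so every descendant of root_page has a path
# starting with root_page.path; together with the slug this identifies the new page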
page = Page.objects.get(path__startswith=self.root_page.path, slug='hello-world').specific
# Should be redirected to edit page
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(page.id, )))
self.assertEqual(page.title, post_data['title'])
self.assertEqual(page.draft_title, post_data['title'])
self.assertIsInstance(page, SimplePage)
self.assertFalse(page.live)
self.assertFalse(page.first_published_at)
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
def test_create_simplepage_scheduled(self):
go_live_at = timezone.now() + datetime.timedelta(days=1)
expire_at = timezone.now() + datetime.timedelta(days=2)
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'go_live_at': submittable_timestamp(go_live_at),
'expire_at': submittable_timestamp(expire_at),
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
# Should be redirected to explorer page
self.assertEqual(response.status_code, 302)
# Find the page and check the scheduled times
page = Page.objects.get(path__startswith=self.root_page.path, slug='hello-world').specific
self.assertEqual(page.go_live_at.date(), go_live_at.date())
self.assertEqual(page.expire_at.date(), expire_at.date())
self.assertEqual(page.expired, False)
self.assertEqual(page.status_string, "draft")
# No revisions with approved_go_live_at
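# (exclude(approved_go_live_at__isnull=True) is a double negative: it keeps only
# revisions whose approved_go_live_at is actually set)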
self.assertFalse(PageRevision.objects.filter(page=page).exclude(approved_go_live_at__isnull=True).exists())
def test_create_simplepage_scheduled_go_live_before_expiry(self):
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'go_live_at': submittable_timestamp(timezone.now() + datetime.timedelta(days=2)),
'expire_at': submittable_timestamp(timezone.now() + datetime.timedelta(days=1)),
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(response, 'form', 'go_live_at', "Go live date/time must be before expiry date/time")
self.assertFormError(response, 'form', 'expire_at', "Go live date/time must be before expiry date/time")
# form should be marked as having unsaved changes for the purposes of the dirty-forms warning
self.assertContains(response, "alwaysDirty: true")
def test_create_simplepage_scheduled_expire_in_the_past(self):
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'expire_at': submittable_timestamp(timezone.now() + datetime.timedelta(days=-1)),
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(response, 'form', 'expire_at', "Expiry date/time must be in the future")
# form should be marked as having unsaved changes for the purposes of the dirty-forms warning
self.assertContains(response, "alwaysDirty: true")
def test_create_simplepage_post_publish(self):
# Connect a mock signal handler to page_published signal
mock_handler = mock.MagicMock()
page_published.connect(mock_handler)
# Post
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
# Find the page and check it
page = Page.objects.get(path__startswith=self.root_page.path, slug='hello-world').specific
# Should be redirected to explorer
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
self.assertEqual(page.title, post_data['title'])
self.assertEqual(page.draft_title, post_data['title'])
self.assertIsInstance(page, SimplePage)
self.assertTrue(page.live)
self.assertTrue(page.first_published_at)
# Check that the page_published signal was fired
self.assertEqual(mock_handler.call_count, 1)
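# each entry in mock_calls is a (name, args, kwargs) tuple, so [0][2] is the
# kwargs dict of the first recorded call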
mock_call = mock_handler.mock_calls[0][2]
self.assertEqual(mock_call['sender'], page.specific_class)
self.assertEqual(mock_call['instance'], page)
self.assertIsInstance(mock_call['instance'], page.specific_class)
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
def test_create_simplepage_post_publish_scheduled(self):
go_live_at = timezone.now() + datetime.timedelta(days=1)
expire_at = timezone.now() + datetime.timedelta(days=2)
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
'go_live_at': submittable_timestamp(go_live_at),
'expire_at': submittable_timestamp(expire_at),
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
# Should be redirected to explorer page
self.assertEqual(response.status_code, 302)
# Find the page and check it
page = Page.objects.get(path__startswith=self.root_page.path, slug='hello-world').specific
self.assertEqual(page.go_live_at.date(), go_live_at.date())
self.assertEqual(page.expire_at.date(), expire_at.date())
self.assertEqual(page.expired, False)
# A revision with approved_go_live_at should exist now
self.assertTrue(PageRevision.objects.filter(page=page).exclude(approved_go_live_at__isnull=True).exists())
# But Page won't be live
self.assertFalse(page.live)
self.assertFalse(page.first_published_at)
self.assertEqual(page.status_string, "scheduled")
def test_create_simplepage_post_submit(self):
# Create a moderator user for testing email
get_user_model().objects.create_superuser('moderator', 'moderator@email.com', 'password')
# Submit
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
# Find the page and check it
page = Page.objects.get(path__startswith=self.root_page.path, slug='hello-world').specific
# Should be redirected to explorer
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
self.assertEqual(page.title, post_data['title'])
self.assertIsInstance(page, SimplePage)
self.assertFalse(page.live)
self.assertFalse(page.first_published_at)
# The latest revision for the page should now be in moderation
self.assertTrue(page.get_latest_revision().submitted_for_moderation)
# Check that the moderator got an email
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['moderator@email.com'])
self.assertEqual(mail.outbox[0].subject, 'The page "New page!" has been submitted for moderation')
def test_create_simplepage_post_existing_slug(self):
# This tests the existing slug checking on page save
# Create a page
self.child_page = SimplePage(title="Hello world!", slug="hello-world", content="hello")
self.root_page.add_child(instance=self.child_page)
# Attempt to create a new one with the same slug
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
# Should not be redirected (as the save should fail)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(response, 'form', 'slug', "This slug is already in use")
# form should be marked as having unsaved changes for the purposes of the dirty-forms warning
self.assertContains(response, "alwaysDirty: true")
def test_create_nonexistantparent(self):
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', 100000)))
self.assertEqual(response.status_code, 404)
def test_create_nonpagetype(self):
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('wagtailimages', 'image', self.root_page.id))
)
self.assertEqual(response.status_code, 404)
def test_preview_on_create(self):
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
preview_url = reverse('wagtailadmin_pages:preview_on_add',
args=('tests', 'simplepage', self.root_page.id))
response = self.client.post(preview_url, post_data)
# Check the JSON response
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(response.content.decode(), {'is_valid': True})
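# a valid POST stashes the submitted form data in the session; the follow-up GET
# renders the preview from that stashed data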
response = self.client.get(preview_url)
# Check the HTML response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'tests/simple_page.html')
self.assertContains(response, "New page!")
# Check that the treebeard attributes were set correctly on the page object
self.assertEqual(response.context['self'].depth, self.root_page.depth + 1)
self.assertTrue(response.context['self'].path.startswith(self.root_page.path))
self.assertEqual(response.context['self'].get_parent(), self.root_page)
def test_whitespace_titles(self):
post_data = {
'title': " ", # Single space on purpose
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
# Check that a form error was raised
self.assertFormError(response, 'form', 'title', "This field is required.")
def test_whitespace_titles_with_tab(self):
post_data = {
'title': "\t", # Single tab on purpose
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
response = self.client.post(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data)
# Check that a form error was raised
self.assertFormError(response, 'form', 'title', "This field is required.")
def test_whitespace_titles_with_tab_in_seo_title(self):
post_data = {
'title': "Hello",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
'seo_title': '\t'
}
response = self.client.post(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data)
# Should be successful, as seo_title is not required
self.assertEqual(response.status_code, 302)
# The tab should be automatically stripped from the seo_title
page = Page.objects.order_by('-id').first()
self.assertEqual(page.seo_title, '')
def test_whitespace_is_stripped_from_titles(self):
post_data = {
'title': " Hello ",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
'seo_title': ' hello SEO '
}
response = self.client.post(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data)
# Should be successful, as both title and seo_title are non-empty after stripping
self.assertEqual(response.status_code, 302)
# Whitespace should be automatically stripped from title and seo_title
page = Page.objects.order_by('-id').first()
self.assertEqual(page.title, 'Hello')
self.assertEqual(page.draft_title, 'Hello')
self.assertEqual(page.seo_title, 'hello SEO')
def test_long_slug(self):
post_data = {
'title': "Hello world",
'content': "Some content",
'slug': 'hello-world-hello-world-hello-world-hello-world-hello-world-hello-world-'
'hello-world-hello-world-hello-world-hello-world-hello-world-hello-world-'
'hello-world-hello-world-hello-world-hello-world-hello-world-hello-world-'
'hello-world-hello-world-hello-world-hello-world-hello-world-hello-world',
'action-submit': "Submit",
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)), post_data
)
# Check that a form error was raised
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'slug', "Ensure this value has at most 255 characters (it has 287).")
def test_before_create_page_hook(self):
def hook_func(request, parent_page, page_class):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(parent_page.id, self.root_page.id)
self.assertEqual(page_class, SimplePage)
return HttpResponse("Overridden!")
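# register_hook (a WagtailTestUtils helper here) registers the hook only for the
# duration of the with block, so other tests are unaffected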
with self.register_hook('before_create_page', hook_func):
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id))
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_before_create_page_hook_post(self):
def hook_func(request, parent_page, page_class):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(parent_page.id, self.root_page.id)
self.assertEqual(page_class, SimplePage)
return HttpResponse("Overridden!")
with self.register_hook('before_create_page', hook_func):
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)),
post_data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_after_create_page_hook(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertIsInstance(page, SimplePage)
return HttpResponse("Overridden!")
with self.register_hook('after_create_page', hook_func):
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.root_page.id)),
post_data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
class TestPageEdit(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add child page
child_page = SimplePage(
title="Hello world!",
slug="hello-world",
content="hello",
)
self.root_page.add_child(instance=child_page)
child_page.save_revision().publish()
self.child_page = SimplePage.objects.get(id=child_page.id)
# Add file page
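# give the in-memory file a name so Django's FileField can store it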
fake_file = ContentFile("File for testing multipart")
fake_file.name = 'test.txt'
file_page = FilePage(
title="File Page",
slug="file-page",
file_field=fake_file,
)
self.root_page.add_child(instance=file_page)
file_page.save_revision().publish()
self.file_page = FilePage.objects.get(id=file_page.id)
# Add event page (to test edit handlers)
self.event_page = EventPage(
title="Event page", slug="event-page",
location='the moon', audience='public',
cost='free', date_from='2001-01-01',
)
self.root_page.add_child(instance=self.event_page)
# Login
self.user = self.login()
def test_page_edit(self):
# Tests that the edit page loads
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.event_page.id, )))
self.assertEqual(response.status_code, 200)
# Test InlinePanel labels/headings
self.assertContains(response, '<legend>Speaker lineup</legend>')
self.assertContains(response, 'Add speakers')
def test_edit_multipart(self):
"""
Check that 'enctype="multipart/form-data"' is added to forms that require multipart encoding, and only to those.
"""
# check for EventPage, which has no file field
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.event_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'enctype="multipart/form-data"')
self.assertTemplateUsed(response, 'wagtailadmin/pages/edit.html')
# check for FilePage, which has a file field
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.file_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'enctype="multipart/form-data"')
def test_upload_file_publish(self):
"""
Check that file uploads work when directly publishing
"""
file_upload = ContentFile(b"A new file", name='published-file.txt')
post_data = {
'title': 'New file',
'slug': 'new-file',
'file_field': file_upload,
'action-publish': "Publish",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=[self.file_page.id]), post_data)
# Should be redirected to explorer
self.assertRedirects(response, reverse('wagtailadmin_explore', args=[self.root_page.id]))
# Check the new file exists
file_page = FilePage.objects.get()
self.assertEqual(file_page.file_field.name, file_upload.name)
self.assertTrue(os.path.exists(file_page.file_field.path))
self.assertEqual(file_page.file_field.read(), b"A new file")
def test_upload_file_draft(self):
"""
Check that file uploads work when saving a draft
"""
file_upload = ContentFile(b"A new file", name='draft-file.txt')
post_data = {
'title': 'New file',
'slug': 'new-file',
'file_field': file_upload,
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=[self.file_page.id]), post_data)
# Should be redirected to edit page
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=[self.file_page.id]))
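# even though the page itself is only saved as a draft, the uploaded file is
# written to storage immediately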
# Check the file was uploaded
file_path = os.path.join(settings.MEDIA_ROOT, file_upload.name)
self.assertTrue(os.path.exists(file_path))
with open(file_path, 'rb') as saved_file:
self.assertEqual(saved_file.read(), b"A new file")
# Publish the draft just created
FilePage.objects.get().get_latest_revision().publish()
# Get the file page, check the file is set
file_page = FilePage.objects.get()
self.assertEqual(file_page.file_field.name, file_upload.name)
self.assertTrue(os.path.exists(file_page.file_field.path))
self.assertEqual(file_page.file_field.read(), b"A new file")
def test_page_edit_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get edit page
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_page_edit_post(self):
# Tests simple editing
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to edit page
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
# The page should have "has_unpublished_changes" flag set
child_page_new = SimplePage.objects.get(id=self.child_page.id)
self.assertTrue(child_page_new.has_unpublished_changes)
# Page fields should not be changed (because we just created a new draft)
self.assertEqual(child_page_new.title, self.child_page.title)
self.assertEqual(child_page_new.content, self.child_page.content)
self.assertEqual(child_page_new.slug, self.child_page.slug)
# The draft_title should have a new title
self.assertEqual(child_page_new.draft_title, post_data['title'])
def test_page_edit_post_when_locked(self):
# Tests that trying to edit a locked page results in an error
# Lock the page
self.child_page.locked = True
self.child_page.save()
# Post
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Shouldn't be redirected
self.assertContains(response, "The page could not be saved as it is locked")
# The page shouldn't have "has_unpublished_changes" flag set
child_page_new = SimplePage.objects.get(id=self.child_page.id)
self.assertFalse(child_page_new.has_unpublished_changes)
def test_edit_post_scheduled(self):
# put go_live_at and expire_at several days away from the current date, to avoid
# false matches in content_json__contains tests
go_live_at = timezone.now() + datetime.timedelta(days=10)
expire_at = timezone.now() + datetime.timedelta(days=20)
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'go_live_at': submittable_timestamp(go_live_at),
'expire_at': submittable_timestamp(expire_at),
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to explorer page
self.assertEqual(response.status_code, 302)
child_page_new = SimplePage.objects.get(id=self.child_page.id)
# The page will still be live
self.assertTrue(child_page_new.live)
# A revision with approved_go_live_at should not exist
self.assertFalse(PageRevision.objects.filter(
page=child_page_new).exclude(approved_go_live_at__isnull=True).exists()
)
# But a revision with go_live_at and expire_at in their content json *should* exist
self.assertTrue(PageRevision.objects.filter(
page=child_page_new, content_json__contains=str(go_live_at.date())).exists()
)
self.assertTrue(
PageRevision.objects.filter(page=child_page_new, content_json__contains=str(expire_at.date())).exists()
)
def test_edit_scheduled_go_live_before_expiry(self):
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'go_live_at': submittable_timestamp(timezone.now() + datetime.timedelta(days=2)),
'expire_at': submittable_timestamp(timezone.now() + datetime.timedelta(days=1)),
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(response, 'form', 'go_live_at', "Go live date/time must be before expiry date/time")
self.assertFormError(response, 'form', 'expire_at', "Go live date/time must be before expiry date/time")
# form should be marked as having unsaved changes for the purposes of the dirty-forms warning
self.assertContains(response, "alwaysDirty: true")
def test_edit_scheduled_expire_in_the_past(self):
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'expire_at': submittable_timestamp(timezone.now() + datetime.timedelta(days=-1)),
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(response, 'form', 'expire_at', "Expiry date/time must be in the future")
# form should be marked as having unsaved changes for the purposes of the dirty-forms warning
self.assertContains(response, "alwaysDirty: true")
def test_page_edit_post_publish(self):
# Connect a mock signal handler to page_published signal
mock_handler = mock.MagicMock()
page_published.connect(mock_handler)
# Set has_unpublished_changes=True on the existing record to confirm that the publish action
# is resetting it (and not just leaving it alone)
self.child_page.has_unpublished_changes = True
self.child_page.save()
# Save current value of first_published_at so we can check that it doesn't change
first_published_at = SimplePage.objects.get(id=self.child_page.id).first_published_at
# Tests publish from edit page
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world-new',
'action-publish': "Publish",
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data, follow=True
)
# Should be redirected to explorer
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page was edited
child_page_new = SimplePage.objects.get(id=self.child_page.id)
self.assertEqual(child_page_new.title, post_data['title'])
self.assertEqual(child_page_new.draft_title, post_data['title'])
# Check that the page_published signal was fired
self.assertEqual(mock_handler.call_count, 1)
mock_call = mock_handler.mock_calls[0][2]
self.assertEqual(mock_call['sender'], child_page_new.specific_class)
self.assertEqual(mock_call['instance'], child_page_new)
self.assertIsInstance(mock_call['instance'], child_page_new.specific_class)
# The page shouldn't have "has_unpublished_changes" flag set
self.assertFalse(child_page_new.has_unpublished_changes)
# first_published_at should not change as it was already set
self.assertEqual(first_published_at, child_page_new.first_published_at)
# The "View Live" button should have the updated slug.
for message in response.context['messages']:
self.assertIn('hello-world-new', message.message)
break
def test_first_published_at_editable(self):
"""Test that we can update the first_published_at via the Page edit form,
for page models that expose it."""
# Add child page, of a type which has first_published_at in its form
child_page = ManyToManyBlogPage(
title="Hello world!",
slug="hello-again-world",
body="hello",
)
self.root_page.add_child(instance=child_page)
child_page.save_revision().publish()
self.child_page = ManyToManyBlogPage.objects.get(id=child_page.id)
initial_delta = self.child_page.first_published_at - timezone.now()
first_published_at = timezone.now() - datetime.timedelta(days=2)
post_data = {
'title': "I've been edited!",
'body': "Some content",
'slug': 'hello-again-world',
'action-publish': "Publish",
'first_published_at': submittable_timestamp(first_published_at),
}
self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Get the edited page.
child_page_new = ManyToManyBlogPage.objects.get(id=self.child_page.id)
# first_published_at should have changed.
new_delta = child_page_new.first_published_at - timezone.now()
self.assertNotEqual(new_delta.days, initial_delta.days)
# first_published_at is now 2 days in the past; since the delta is slightly more
# negative than -2 days, timedelta normalization gives days == -3.
self.assertEqual(new_delta.days, -3)
def test_edit_post_publish_scheduled_unpublished_page(self):
# Unpublish the page
self.child_page.live = False
self.child_page.save()
go_live_at = timezone.now() + datetime.timedelta(days=1)
expire_at = timezone.now() + datetime.timedelta(days=2)
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
'go_live_at': submittable_timestamp(go_live_at),
'expire_at': submittable_timestamp(expire_at),
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to explorer page
self.assertEqual(response.status_code, 302)
child_page_new = SimplePage.objects.get(id=self.child_page.id)
# The page should not be live anymore
self.assertFalse(child_page_new.live)
# Instead a revision with approved_go_live_at should now exist
self.assertTrue(
PageRevision.objects.filter(page=child_page_new).exclude(approved_go_live_at__isnull=True).exists()
)
# The page SHOULD have the "has_unpublished_changes" flag set,
# because the changes are not visible as a live page yet
self.assertTrue(
child_page_new.has_unpublished_changes,
"A page scheduled for future publishing should have has_unpublished_changes=True"
)
self.assertEqual(child_page_new.status_string, "scheduled")
def test_edit_post_publish_now_an_already_scheduled_unpublished_page(self):
# Unpublish the page
self.child_page.live = False
self.child_page.save()
# First let's publish a page with a go_live_at in the future
go_live_at = timezone.now() + datetime.timedelta(days=1)
expire_at = timezone.now() + datetime.timedelta(days=2)
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
'go_live_at': submittable_timestamp(go_live_at),
'expire_at': submittable_timestamp(expire_at),
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to edit page
self.assertEqual(response.status_code, 302)
child_page_new = SimplePage.objects.get(id=self.child_page.id)
# The page should not be live
self.assertFalse(child_page_new.live)
self.assertEqual(child_page_new.status_string, "scheduled")
# Instead a revision with approved_go_live_at should now exist
self.assertTrue(
PageRevision.objects.filter(page=child_page_new).exclude(approved_go_live_at__isnull=True).exists()
)
# Now, let's edit it and publish it right now
# leave go_live_at blank so publication happens immediately
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
'go_live_at': "",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to edit page
self.assertEqual(response.status_code, 302)
child_page_new = SimplePage.objects.get(id=self.child_page.id)
# The page should be live now
self.assertTrue(child_page_new.live)
# And a revision with approved_go_live_at should not exist
self.assertFalse(
PageRevision.objects.filter(page=child_page_new).exclude(approved_go_live_at__isnull=True).exists()
)
def test_edit_post_publish_scheduled_published_page(self):
# Page is live
self.child_page.live = True
self.child_page.save()
live_revision = self.child_page.live_revision
original_title = self.child_page.title
go_live_at = timezone.now() + datetime.timedelta(days=1)
expire_at = timezone.now() + datetime.timedelta(days=2)
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
'go_live_at': submittable_timestamp(go_live_at),
'expire_at': submittable_timestamp(expire_at),
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to explorer page
self.assertEqual(response.status_code, 302)
child_page_new = SimplePage.objects.get(id=self.child_page.id)
# The page should still be live
self.assertTrue(child_page_new.live)
self.assertEqual(child_page_new.status_string, "live + scheduled")
# Instead a revision with approved_go_live_at should now exist
self.assertTrue(
PageRevision.objects.filter(page=child_page_new).exclude(approved_go_live_at__isnull=True).exists()
)
# The page SHOULD have the "has_unpublished_changes" flag set,
# because the changes are not visible as a live page yet
self.assertTrue(
child_page_new.has_unpublished_changes,
"A page scheduled for future publishing should have has_unpublished_changes=True"
)
self.assertNotEqual(
child_page_new.get_latest_revision(), live_revision,
"A page scheduled for future publishing should have a new revision, that is not the live revision"
)
self.assertEqual(
child_page_new.title, original_title,
"A live page with scheduled revisions should still have original content"
)
def test_edit_post_publish_now_an_already_scheduled_published_page(self):
# Ensure the page is live
self.child_page.live = True
self.child_page.save()
original_title = self.child_page.title
# First let's publish a page with a go_live_at in the future
go_live_at = timezone.now() + datetime.timedelta(days=1)
expire_at = timezone.now() + datetime.timedelta(days=2)
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
'go_live_at': submittable_timestamp(go_live_at),
'expire_at': submittable_timestamp(expire_at),
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to edit page
self.assertEqual(response.status_code, 302)
child_page_new = SimplePage.objects.get(id=self.child_page.id)
# The page should still be live
self.assertTrue(child_page_new.live)
# Instead a revision with approved_go_live_at should now exist
self.assertTrue(
PageRevision.objects.filter(page=child_page_new).exclude(approved_go_live_at__isnull=True).exists()
)
self.assertEqual(
child_page_new.title, original_title,
"A live page with scheduled revisions should still have original content"
)
# Now, let's edit it and publish it right now
# leave go_live_at blank so publication happens immediately
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-publish': "Publish",
'go_live_at': "",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to edit page
self.assertEqual(response.status_code, 302)
child_page_new = SimplePage.objects.get(id=self.child_page.id)
# The page should be live now
self.assertTrue(child_page_new.live)
# And a revision with approved_go_live_at should not exist
self.assertFalse(
PageRevision.objects.filter(page=child_page_new).exclude(approved_go_live_at__isnull=True).exists()
)
self.assertEqual(
child_page_new.title, post_data['title'],
"A published page should have the new title"
)
def test_page_edit_post_submit(self):
# Create a moderator user for testing email
get_user_model().objects.create_superuser('moderator', 'moderator@email.com', 'password')
# Tests submitting from edit page
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to explorer
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# The page should have "has_unpublished_changes" flag set
child_page_new = SimplePage.objects.get(id=self.child_page.id)
self.assertTrue(child_page_new.has_unpublished_changes)
# The latest revision for the page should now be in moderation
self.assertTrue(child_page_new.get_latest_revision().submitted_for_moderation)
# Check that the moderator got an email
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['moderator@email.com'])
self.assertEqual(
mail.outbox[0].subject, 'The page "Hello world!" has been submitted for moderation'
) # Note: should this be "I've been edited!"?
def test_page_edit_post_existing_slug(self):
# This tests the existing slug checking on page edit
# Create a page
self.child_page = SimplePage(title="Hello world 2", slug="hello-world2", content="hello")
self.root_page.add_child(instance=self.child_page)
# Attempt to change the slug to one that's already in use
post_data = {
'title': "Hello world 2",
'slug': 'hello-world',
'action-submit': "Submit",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should not be redirected (as the save should fail)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(response, 'form', 'slug', "This slug is already in use")
def test_preview_on_edit(self):
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
preview_url = reverse('wagtailadmin_pages:preview_on_edit',
args=(self.child_page.id,))
response = self.client.post(preview_url, post_data)
# Check the JSON response
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(response.content.decode(), {'is_valid': True})
response = self.client.get(preview_url)
# Check the HTML response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'tests/simple_page.html')
self.assertContains(response, "I've been edited!")
def test_preview_on_edit_no_session_key(self):
preview_url = reverse('wagtailadmin_pages:preview_on_edit',
args=(self.child_page.id,))
# get() without corresponding post(), key not set.
response = self.client.get(preview_url)
# Check the HTML response
self.assertEqual(response.status_code, 200)
# We should have an error page because we are unable to
# preview; the page key was not in the session.
self.assertContains(
response,
"<title>Wagtail - Preview error</title>",
html=True
)
self.assertContains(
response,
"<h1>Preview error</h1>",
html=True
)
@modify_settings(ALLOWED_HOSTS={'append': 'childpage.example.com'})
def test_preview_uses_correct_site(self):
# create a Site record for the child page
Site.objects.create(hostname='childpage.example.com', root_page=self.child_page)
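# the preview view is assumed to build its dummy request against the page's own
# site hostname, which is why ALLOWED_HOSTS is extended above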
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
preview_url = reverse('wagtailadmin_pages:preview_on_edit',
args=(self.child_page.id,))
response = self.client.post(preview_url, post_data)
# Check the JSON response
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(response.content.decode(), {'is_valid': True})
response = self.client.get(preview_url)
# Check that the correct site object has been selected by the site middleware
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'tests/simple_page.html')
self.assertEqual(response.context['request'].site.hostname, 'childpage.example.com')
def test_editor_picks_up_direct_model_edits(self):
# If a page has no draft edits, the editor should show the version from the live database
# record rather than the latest revision record. This ensures that the edit interface
# reflects any changes made directly on the model.
self.child_page.title = "This title only exists on the live database record"
self.child_page.save()
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "This title only exists on the live database record")
def test_editor_does_not_pick_up_direct_model_edits_when_draft_edits_exist(self):
# If a page has draft edits, we should always show those in the editor, not the live
# database record
self.child_page.content = "Some content with a draft edit"
self.child_page.save_revision()
# make an independent change to the live database record
self.child_page = SimplePage.objects.get(id=self.child_page.id)
self.child_page.title = "This title only exists on the live database record"
self.child_page.save()
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "This title only exists on the live database record")
self.assertContains(response, "Some content with a draft edit")
def test_before_edit_page_hook(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(page.id, self.child_page.id)
return HttpResponse("Overridden!")
with self.register_hook('before_edit_page', hook_func):
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_before_edit_page_hook_post(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(page.id, self.child_page.id)
return HttpResponse("Overridden!")
with self.register_hook('before_edit_page', hook_func):
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world-new',
'action-publish': "Publish",
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_after_edit_page_hook(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(page.id, self.child_page.id)
return HttpResponse("Overridden!")
with self.register_hook('after_edit_page', hook_func):
post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world-new',
'action-publish': "Publish",
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
class TestPageEditReordering(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add event page
self.event_page = EventPage(
title="Event page", slug="event-page",
location='the moon', audience='public',
cost='free', date_from='2001-01-01',
)
self.event_page.carousel_items = [
EventPageCarouselItem(caption='1234567', sort_order=1),
EventPageCarouselItem(caption='7654321', sort_order=2),
EventPageCarouselItem(caption='abcdefg', sort_order=3),
]
self.root_page.add_child(instance=self.event_page)
# Login
self.user = self.login()
def check_order(self, response, expected_order):
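# edit_handler.children[0] is the first tab of the edit interface; index 9 is
# assumed to be the position of the carousel_items InlinePanel within that tab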
inline_panel = response.context['edit_handler'].children[0].children[9]
order = [child.form.instance.caption for child in inline_panel.children]
self.assertEqual(order, expected_order)
def test_order(self):
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.event_page.id, )))
self.assertEqual(response.status_code, 200)
self.check_order(response, ['1234567', '7654321', 'abcdefg'])
def test_reorder(self):
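# the *-TOTAL_FORMS / *-INITIAL_FORMS / *-MAX_NUM_FORMS keys below are Django
# formset management-form fields; the *-ORDER values are what drive the reordering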
post_data = {
'title': "Event page",
'slug': 'event-page',
'date_from': '01/01/2014',
'cost': '$10',
'audience': 'public',
'location': 'somewhere',
'related_links-INITIAL_FORMS': 0,
'related_links-MAX_NUM_FORMS': 1000,
'related_links-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MAX_NUM_FORMS': 1000,
'speakers-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 1000,
'head_counts-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 3,
'carousel_items-MAX_NUM_FORMS': 1000,
'carousel_items-TOTAL_FORMS': 3,
'carousel_items-0-id': self.event_page.carousel_items.all()[0].id,
'carousel_items-0-caption': self.event_page.carousel_items.all()[0].caption,
'carousel_items-0-ORDER': 2,
'carousel_items-1-id': self.event_page.carousel_items.all()[1].id,
'carousel_items-1-caption': self.event_page.carousel_items.all()[1].caption,
'carousel_items-1-ORDER': 3,
'carousel_items-2-id': self.event_page.carousel_items.all()[2].id,
'carousel_items-2-caption': self.event_page.carousel_items.all()[2].caption,
'carousel_items-2-ORDER': 1,
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.event_page.id, )), post_data)
# Should be redirected back to same page
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.event_page.id, )))
# Check order
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.event_page.id, )))
self.assertEqual(response.status_code, 200)
self.check_order(response, ['abcdefg', '1234567', '7654321'])
def test_reorder_with_validation_error(self):
post_data = {
'title': "", # Validation error
'slug': 'event-page',
'date_from': '01/01/2014',
'cost': '$10',
'audience': 'public',
'location': 'somewhere',
'related_links-INITIAL_FORMS': 0,
'related_links-MAX_NUM_FORMS': 1000,
'related_links-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MAX_NUM_FORMS': 1000,
'speakers-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 1000,
'head_counts-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 3,
'carousel_items-MAX_NUM_FORMS': 1000,
'carousel_items-TOTAL_FORMS': 3,
'carousel_items-0-id': self.event_page.carousel_items.all()[0].id,
'carousel_items-0-caption': self.event_page.carousel_items.all()[0].caption,
'carousel_items-0-ORDER': 2,
'carousel_items-1-id': self.event_page.carousel_items.all()[1].id,
'carousel_items-1-caption': self.event_page.carousel_items.all()[1].caption,
'carousel_items-1-ORDER': 3,
'carousel_items-2-id': self.event_page.carousel_items.all()[2].id,
'carousel_items-2-caption': self.event_page.carousel_items.all()[2].caption,
'carousel_items-2-ORDER': 1,
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.event_page.id, )), post_data)
self.assertEqual(response.status_code, 200)
self.check_order(response, ['abcdefg', '1234567', '7654321'])
class TestPageDelete(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add child page
self.child_page = SimplePage(title="Hello world!", slug="hello-world", content="hello")
self.root_page.add_child(instance=self.child_page)
# Add a page with child pages of its own
self.child_index = StandardIndex(title="Hello index", slug='hello-index')
self.root_page.add_child(instance=self.child_index)
self.grandchild_page = StandardChild(title="Hello Kitty", slug='hello-kitty')
self.child_index.add_child(instance=self.grandchild_page)
# Login
self.user = self.login()
def test_page_delete(self):
response = self.client.get(reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
# deletion should not actually happen on GET
self.assertTrue(SimplePage.objects.filter(id=self.child_page.id).exists())
def test_page_delete_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get delete page
response = self.client.get(reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
# Check that the deletion has not happened
self.assertTrue(SimplePage.objects.filter(id=self.child_page.id).exists())
def test_page_delete_post(self):
# Connect a mock signal handler to page_unpublished signal
mock_handler = mock.MagicMock()
page_unpublished.connect(mock_handler)
# Post
response = self.client.post(reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))
# Should be redirected to explorer page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
# Check that the page is gone
self.assertEqual(Page.objects.filter(path__startswith=self.root_page.path, slug='hello-world').count(), 0)
# Check that the page_unpublished signal was fired
self.assertEqual(mock_handler.call_count, 1)
mock_call = mock_handler.mock_calls[0][2]
self.assertEqual(mock_call['sender'], self.child_page.specific_class)
self.assertEqual(mock_call['instance'], self.child_page)
self.assertIsInstance(mock_call['instance'], self.child_page.specific_class)
def test_page_delete_notlive_post(self):
# Same as above, but this makes sure the page_unpublished signal is not fired
# if the page is not live when it is deleted
# Unpublish the page
self.child_page.live = False
self.child_page.save()
# Connect a mock signal handler to page_unpublished signal
mock_handler = mock.MagicMock()
page_unpublished.connect(mock_handler)
# Post
response = self.client.post(reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))
# Should be redirected to explorer page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
# Check that the page is gone
self.assertEqual(Page.objects.filter(path__startswith=self.root_page.path, slug='hello-world').count(), 0)
# Check that the page_unpublished signal was not fired
self.assertEqual(mock_handler.call_count, 0)
def test_subpage_deletion(self):
# Connect mock signal handlers to page_unpublished, pre_delete and post_delete signals
unpublish_signals_received = []
pre_delete_signals_received = []
post_delete_signals_received = []
def page_unpublished_handler(sender, instance, **kwargs):
unpublish_signals_received.append((sender, instance.id))
def pre_delete_handler(sender, instance, **kwargs):
pre_delete_signals_received.append((sender, instance.id))
def post_delete_handler(sender, instance, **kwargs):
post_delete_signals_received.append((sender, instance.id))
page_unpublished.connect(page_unpublished_handler)
pre_delete.connect(pre_delete_handler)
post_delete.connect(post_delete_handler)
# Post
response = self.client.post(reverse('wagtailadmin_pages:delete', args=(self.child_index.id, )))
# Should be redirected to explorer page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
# Check that the page is gone
self.assertFalse(StandardIndex.objects.filter(id=self.child_index.id).exists())
self.assertFalse(Page.objects.filter(id=self.child_index.id).exists())
# Check that the subpage is also gone
self.assertFalse(StandardChild.objects.filter(id=self.grandchild_page.id).exists())
self.assertFalse(Page.objects.filter(id=self.grandchild_page.id).exists())
# Check that the signals were fired for both pages
self.assertIn((StandardIndex, self.child_index.id), unpublish_signals_received)
self.assertIn((StandardChild, self.grandchild_page.id), unpublish_signals_received)
self.assertIn((StandardIndex, self.child_index.id), pre_delete_signals_received)
self.assertIn((StandardChild, self.grandchild_page.id), pre_delete_signals_received)
self.assertIn((StandardIndex, self.child_index.id), post_delete_signals_received)
self.assertIn((StandardChild, self.grandchild_page.id), post_delete_signals_received)
def test_before_delete_page_hook(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(page.id, self.child_page.id)
return HttpResponse("Overridden!")
with self.register_hook('before_delete_page', hook_func):
response = self.client.get(reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_before_delete_page_hook_post(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(page.id, self.child_page.id)
return HttpResponse("Overridden!")
with self.register_hook('before_delete_page', hook_func):
response = self.client.post(reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_after_delete_page_hook(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertEqual(page.id, self.child_page.id)
return HttpResponse("Overridden!")
with self.register_hook('after_delete_page', hook_func):
response = self.client.post(reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
class TestPageSearch(TestCase, WagtailTestUtils):
def setUp(self):
self.user = self.login()
def get(self, params=None, **extra):
return self.client.get(reverse('wagtailadmin_pages:search'), params or {}, **extra)
def test_view(self):
response = self.get()
self.assertTemplateUsed(response, 'wagtailadmin/pages/search.html')
self.assertEqual(response.status_code, 200)
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/search.html')
self.assertEqual(response.context['query_string'], "Hello")
def test_search_searchable_fields(self):
# Find root page
root_page = Page.objects.get(id=2)
# Create a page
root_page.add_child(instance=SimplePage(
title="Hi there!", slug='hello-world', content="good morning",
live=True,
has_unpublished_changes=False,
))
# Confirm the slug is not being searched
response = self.get({'q': "hello"})
self.assertNotContains(response, "There is one matching page")
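# snapshot the class-level search_fields; mutating Page.search_fields affects the
# whole process, so it must be restored below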
search_fields = Page.search_fields
# Add slug to the search_fields
Page.search_fields = Page.search_fields + [SearchField('slug', partial_match=True)]
# Confirm the slug is being searched
response = self.get({'q': "hello"})
self.assertContains(response, "There is one matching page")
# Reset the search fields
Page.search_fields = search_fields
def test_ajax(self):
response = self.get({'q': "Hello"}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response.status_code, 200)
self.assertTemplateNotUsed(response, 'wagtailadmin/pages/search.html')
self.assertTemplateUsed(response, 'wagtailadmin/pages/search_results.html')
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'q': "Hello", 'p': page})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/search.html')
def test_root_can_appear_in_search_results(self):
response = self.get({'q': "roo"})
self.assertEqual(response.status_code, 200)
# 'pages' list in the response should contain root
results = response.context['pages']
self.assertTrue(any([r.slug == 'root' for r in results]))
def test_search_uses_admin_display_title_from_specific_class(self):
# SingleEventPage has a custom get_admin_display_title method; explorer should
# show the custom title rather than the basic database one
root_page = Page.objects.get(id=2)
new_event = SingleEventPage(
title="Lunar event",
location='the moon', audience='public',
cost='free', date_from='2001-01-01',
latest_revision_created_at=local_datetime(2016, 1, 1)
)
root_page.add_child(instance=new_event)
response = self.get({'q': "lunar"})
self.assertContains(response, "Lunar event (single event)")
def test_search_no_perms(self):
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
self.assertRedirects(self.get(), '/admin/')
class TestPageMove(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Create two sections
self.section_a = SimplePage(title="Section A", slug="section-a", content="hello")
self.root_page.add_child(instance=self.section_a)
self.section_b = SimplePage(title="Section B", slug="section-b", content="hello")
self.root_page.add_child(instance=self.section_b)
# Add test page into section A
self.test_page = SimplePage(title="Hello world!", slug="hello-world", content="hello")
self.section_a.add_child(instance=self.test_page)
# Login
self.user = self.login()
def test_page_move(self):
response = self.client.get(reverse('wagtailadmin_pages:move', args=(self.test_page.id, )))
self.assertEqual(response.status_code, 200)
def test_page_move_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get move page
response = self.client.get(reverse('wagtailadmin_pages:move', args=(self.test_page.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_page_move_confirm(self):
response = self.client.get(
reverse('wagtailadmin_pages:move_confirm', args=(self.test_page.id, self.section_b.id))
)
self.assertEqual(response.status_code, 200)
def test_page_set_page_position(self):
response = self.client.get(reverse('wagtailadmin_pages:set_page_position', args=(self.test_page.id, )))
self.assertEqual(response.status_code, 200)
class TestPageCopy(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Create a page
self.test_page = self.root_page.add_child(instance=SimplePage(
title="Hello world!",
slug='hello-world',
content="hello",
live=True,
has_unpublished_changes=False,
))
# Create a couple of child pages
self.test_child_page = self.test_page.add_child(instance=SimplePage(
title="Child page",
slug='child-page',
content="hello",
live=True,
has_unpublished_changes=True,
))
self.test_unpublished_child_page = self.test_page.add_child(instance=SimplePage(
title="Unpublished Child page",
slug='unpublished-child-page',
content="hello",
live=False,
has_unpublished_changes=True,
))
# Login
self.user = self.login()
def test_page_copy(self):
response = self.client.get(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/copy.html')
# Make sure all fields are in the form
self.assertContains(response, "New title")
self.assertContains(response, "New slug")
self.assertContains(response, "New parent page")
self.assertContains(response, "Copy subpages")
self.assertContains(response, "Publish copies")
def test_page_copy_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
        # Post to the copy page
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world',
'new_parent_page': str(self.test_page.id),
'copy_subpages': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
# A user with no page permissions at all should be redirected to the admin home
self.assertRedirects(response, reverse('wagtailadmin_home'))
# A user with page permissions, but not add permission at the destination,
# should receive a form validation error
publishers = Group.objects.create(name='Publishers')
GroupPagePermission.objects.create(
group=publishers, page=self.root_page, permission_type='publish'
)
self.user.groups.add(publishers)
self.user.save()
        # Post to the copy page
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world',
'new_parent_page': str(self.test_page.id),
'copy_subpages': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
form = response.context['form']
self.assertFalse(form.is_valid())
self.assertTrue('new_parent_page' in form.errors)
def test_page_copy_post(self):
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world-2',
'new_parent_page': str(self.root_page.id),
'copy_subpages': False,
'publish_copies': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
        # Check that the user was redirected to the parent's explore page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Get copy
page_copy = self.root_page.get_children().filter(slug='hello-world-2').first()
# Check that the copy exists
self.assertNotEqual(page_copy, None)
# Check that the copy is not live
self.assertFalse(page_copy.live)
self.assertTrue(page_copy.has_unpublished_changes)
# Check that the owner of the page is set correctly
self.assertEqual(page_copy.owner, self.user)
# Check that the children were not copied
self.assertEqual(page_copy.get_children().count(), 0)
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
def test_page_copy_post_copy_subpages(self):
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world-2',
'new_parent_page': str(self.root_page.id),
'copy_subpages': True,
'publish_copies': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
        # Check that the user was redirected to the parent's explore page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Get copy
page_copy = self.root_page.get_children().filter(slug='hello-world-2').first()
# Check that the copy exists
self.assertNotEqual(page_copy, None)
# Check that the copy is not live
self.assertFalse(page_copy.live)
self.assertTrue(page_copy.has_unpublished_changes)
# Check that the owner of the page is set correctly
self.assertEqual(page_copy.owner, self.user)
# Check that the children were copied
self.assertEqual(page_copy.get_children().count(), 2)
        # Check the child pages
# Neither of them should be live
child_copy = page_copy.get_children().filter(slug='child-page').first()
self.assertNotEqual(child_copy, None)
self.assertFalse(child_copy.live)
self.assertTrue(child_copy.has_unpublished_changes)
unpublished_child_copy = page_copy.get_children().filter(slug='unpublished-child-page').first()
self.assertNotEqual(unpublished_child_copy, None)
self.assertFalse(unpublished_child_copy.live)
self.assertTrue(unpublished_child_copy.has_unpublished_changes)
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
def test_page_copy_post_copy_subpages_publish_copies(self):
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world-2',
'new_parent_page': str(self.root_page.id),
'copy_subpages': True,
'publish_copies': True,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
        # Check that the user was redirected to the parent's explore page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Get copy
page_copy = self.root_page.get_children().filter(slug='hello-world-2').first()
# Check that the copy exists
self.assertNotEqual(page_copy, None)
# Check that the copy is live
self.assertTrue(page_copy.live)
self.assertFalse(page_copy.has_unpublished_changes)
# Check that the owner of the page is set correctly
self.assertEqual(page_copy.owner, self.user)
# Check that the children were copied
self.assertEqual(page_copy.get_children().count(), 2)
        # Check the child pages
# The child_copy should be live but the unpublished_child_copy shouldn't
child_copy = page_copy.get_children().filter(slug='child-page').first()
self.assertNotEqual(child_copy, None)
self.assertTrue(child_copy.live)
self.assertTrue(child_copy.has_unpublished_changes)
unpublished_child_copy = page_copy.get_children().filter(slug='unpublished-child-page').first()
self.assertNotEqual(unpublished_child_copy, None)
self.assertFalse(unpublished_child_copy.live)
self.assertTrue(unpublished_child_copy.has_unpublished_changes)
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
def test_page_copy_post_new_parent(self):
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world-2',
'new_parent_page': str(self.test_child_page.id),
'copy_subpages': False,
'publish_copies': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
        # Check that the user was redirected to the new parent's explore page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.test_child_page.id, )))
# Check that the page was copied to the correct place
        self.assertEqual(
            Page.objects.filter(slug='hello-world-2').first().get_parent().id,
            self.test_child_page.id
        )
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
def test_page_copy_post_existing_slug_within_same_parent_page(self):
# This tests the existing slug checking on page copy when not changing the parent page
# Attempt to copy the page but forget to change the slug
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world',
'new_parent_page': str(self.root_page.id),
'copy_subpages': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
# Should not be redirected (as the save should fail)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(
response,
'form',
'new_slug',
"This slug is already in use within the context of its parent page \"Welcome to your new Wagtail site!\""
)
def test_page_copy_post_and_subpages_to_same_tree_branch(self):
# This tests that a page cannot be copied into itself when copying subpages
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world',
'new_parent_page': str(self.test_child_page.id),
'copy_subpages': True,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id,)), post_data)
# Should not be redirected (as the save should fail)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(
response, 'form', 'new_parent_page', "You cannot copy a page into itself when copying subpages"
)
def test_page_copy_post_existing_slug_to_another_parent_page(self):
# This tests the existing slug checking on page copy when changing the parent page
        # Attempt to copy the page while changing the parent page
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world',
'new_parent_page': str(self.test_child_page.id),
'copy_subpages': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
        # Check that the user was redirected to the parent's explore page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.test_child_page.id, )))
def test_page_copy_post_invalid_slug(self):
# Attempt to copy the page but set an invalid slug string
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello world!',
'new_parent_page': str(self.root_page.id),
'copy_subpages': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
# Should not be redirected (as the save should fail)
self.assertEqual(response.status_code, 200)
# Check that a form error was raised
self.assertFormError(
response, 'form', 'new_slug', "Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."
)
def test_page_copy_no_publish_permission(self):
# Turn user into an editor who can add pages but not publish them
self.user.is_superuser = False
self.user.groups.add(
Group.objects.get(name="Editors"),
)
self.user.save()
# Get copy page
response = self.client.get(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )))
# The user should have access to the copy page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/copy.html')
# Make sure the "publish copies" field is hidden
self.assertNotContains(response, "Publish copies")
def test_page_copy_no_publish_permission_post_copy_subpages_publish_copies(self):
# This tests that unprivileged users cannot publish copied pages even if they hack their browser
# Turn user into an editor who can add pages but not publish them
self.user.is_superuser = False
self.user.groups.add(
Group.objects.get(name="Editors"),
)
self.user.save()
# Post
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world-2',
'new_parent_page': str(self.root_page.id),
'copy_subpages': True,
'publish_copies': True,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )), post_data)
        # Check that the user was redirected to the parent's explore page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Get copy
page_copy = self.root_page.get_children().filter(slug='hello-world-2').first()
# Check that the copy exists
self.assertNotEqual(page_copy, None)
        # Check that the copy is not live, even though 'publish_copies' was set
self.assertFalse(page_copy.live)
# Check that the owner of the page is set correctly
self.assertEqual(page_copy.owner, self.user)
# Check that the children were copied
self.assertEqual(page_copy.get_children().count(), 2)
        # Check the child pages
# Neither of them should be live
child_copy = page_copy.get_children().filter(slug='child-page').first()
self.assertNotEqual(child_copy, None)
self.assertFalse(child_copy.live)
unpublished_child_copy = page_copy.get_children().filter(slug='unpublished-child-page').first()
self.assertNotEqual(unpublished_child_copy, None)
self.assertFalse(unpublished_child_copy.live)
# treebeard should report no consistency problems with the tree
self.assertFalse(any(Page.find_problems()), 'treebeard found consistency problems')
def test_before_copy_page_hook(self):
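        # A before_copy_page hook that returns an HttpResponse should short-circuit the view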
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertIsInstance(page.specific, SimplePage)
return HttpResponse("Overridden!")
with self.register_hook('before_copy_page', hook_func):
response = self.client.get(reverse('wagtailadmin_pages:copy', args=(self.test_page.id,)))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_before_copy_page_hook_post(self):
def hook_func(request, page):
self.assertIsInstance(request, HttpRequest)
self.assertIsInstance(page.specific, SimplePage)
return HttpResponse("Overridden!")
with self.register_hook('before_copy_page', hook_func):
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world-2',
'new_parent_page': str(self.root_page.id),
'copy_subpages': False,
'publish_copies': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id,)), post_data)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
def test_after_copy_page_hook(self):
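        # after_copy_page hooks receive the newly created copy and may also return a
        # response to override the default redirect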
def hook_func(request, page, new_page):
self.assertIsInstance(request, HttpRequest)
self.assertIsInstance(page.specific, SimplePage)
self.assertIsInstance(new_page.specific, SimplePage)
return HttpResponse("Overridden!")
with self.register_hook('after_copy_page', hook_func):
post_data = {
'new_title': "Hello world 2",
'new_slug': 'hello-world-2',
'new_parent_page': str(self.root_page.id),
'copy_subpages': False,
'publish_copies': False,
}
response = self.client.post(reverse('wagtailadmin_pages:copy', args=(self.test_page.id,)), post_data)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Overridden!")
class TestPageUnpublish(TestCase, WagtailTestUtils):
def setUp(self):
self.user = self.login()
# Create a page to unpublish
self.root_page = Page.objects.get(id=2)
self.page = SimplePage(
title="Hello world!",
slug='hello-world',
content="hello",
live=True,
)
self.root_page.add_child(instance=self.page)
def test_unpublish_view(self):
"""
This tests that the unpublish view responds with an unpublish confirm page
"""
# Get unpublish page
response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))
# Check that the user received an unpublish confirm page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/confirm_unpublish.html')
def test_unpublish_view_invalid_page_id(self):
"""
This tests that the unpublish view returns an error if the page id is invalid
"""
# Get unpublish page
response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(12345, )))
# Check that the user received a 404 response
self.assertEqual(response.status_code, 404)
def test_unpublish_view_bad_permissions(self):
"""
This tests that the unpublish view doesn't allow users without unpublish permissions
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get unpublish page
response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_unpublish_view_post(self):
"""
This posts to the unpublish view and checks that the page was unpublished
"""
# Connect a mock signal handler to page_unpublished signal
mock_handler = mock.MagicMock()
page_unpublished.connect(mock_handler)
# Post to the unpublish page
response = self.client.post(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))
# Should be redirected to explorer page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page was unpublished
self.assertFalse(SimplePage.objects.get(id=self.page.id).live)
# Check that the page_unpublished signal was fired
self.assertEqual(mock_handler.call_count, 1)
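        # mock_calls[0][2] is the keyword-arguments dict of the first call to the handler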
mock_call = mock_handler.mock_calls[0][2]
self.assertEqual(mock_call['sender'], self.page.specific_class)
self.assertEqual(mock_call['instance'], self.page)
self.assertIsInstance(mock_call['instance'], self.page.specific_class)
def test_unpublish_descendants_view(self):
"""
This tests that the unpublish view responds with an unpublish confirm page that does not contain the form field 'include_descendants'
"""
# Get unpublish page
response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))
# Check that the user received an unpublish confirm page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/confirm_unpublish.html')
# Check the form does not contain the checkbox field include_descendants
self.assertNotContains(response, '<input id="id_include_descendants" name="include_descendants" type="checkbox">')
class TestPageUnpublishIncludingDescendants(TestCase, WagtailTestUtils):
def setUp(self):
self.user = self.login()
# Find root page
self.root_page = Page.objects.get(id=2)
# Create a page to unpublish
self.test_page = self.root_page.add_child(instance=SimplePage(
title="Hello world!",
slug='hello-world',
content="hello",
live=True,
has_unpublished_changes=False,
))
# Create a couple of child pages
self.test_child_page = self.test_page.add_child(instance=SimplePage(
title="Child page",
slug='child-page',
content="hello",
live=True,
has_unpublished_changes=True,
))
self.test_another_child_page = self.test_page.add_child(instance=SimplePage(
title="Another Child page",
slug='another-child-page',
content="hello",
live=True,
has_unpublished_changes=True,
))
def test_unpublish_descendants_view(self):
"""
This tests that the unpublish view responds with an unpublish confirm page that contains the form field 'include_descendants'
"""
# Get unpublish page
response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.test_page.id, )))
# Check that the user received an unpublish confirm page
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/confirm_unpublish.html')
# Check the form contains the checkbox field include_descendants
self.assertContains(response, '<input id="id_include_descendants" name="include_descendants" type="checkbox">')
def test_unpublish_include_children_view_post(self):
"""
This posts to the unpublish view and checks that the page and its descendants were unpublished
"""
# Post to the unpublish page
response = self.client.post(reverse('wagtailadmin_pages:unpublish', args=(self.test_page.id, )), {'include_descendants': 'on'})
# Should be redirected to explorer page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page was unpublished
self.assertFalse(SimplePage.objects.get(id=self.test_page.id).live)
        # Check that the descendant pages were unpublished as well
self.assertFalse(SimplePage.objects.get(id=self.test_child_page.id).live)
self.assertFalse(SimplePage.objects.get(id=self.test_another_child_page.id).live)
def test_unpublish_not_include_children_view_post(self):
"""
This posts to the unpublish view and checks that the page was unpublished but its descendants were not
"""
# Post to the unpublish page
response = self.client.post(reverse('wagtailadmin_pages:unpublish', args=(self.test_page.id, )), {})
# Should be redirected to explorer page
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page was unpublished
self.assertFalse(SimplePage.objects.get(id=self.test_page.id).live)
# Check that the descendant pages were not unpublished
self.assertTrue(SimplePage.objects.get(id=self.test_child_page.id).live)
self.assertTrue(SimplePage.objects.get(id=self.test_another_child_page.id).live)
class TestApproveRejectModeration(TestCase, WagtailTestUtils):
def setUp(self):
self.submitter = get_user_model().objects.create_superuser(
username='submitter',
email='submitter@email.com',
password='password',
)
self.user = self.login()
# Create a page and submit it for moderation
root_page = Page.objects.get(id=2)
self.page = SimplePage(
title="Hello world!",
slug='hello-world',
content="hello",
live=False,
has_unpublished_changes=True,
)
root_page.add_child(instance=self.page)
self.page.save_revision(user=self.submitter, submitted_for_moderation=True)
self.revision = self.page.get_latest_revision()
def test_approve_moderation_view(self):
"""
This posts to the approve moderation view and checks that the page was approved
"""
# Connect a mock signal handler to page_published signal
mock_handler = mock.MagicMock()
page_published.connect(mock_handler)
# Post
response = self.client.post(reverse('wagtailadmin_pages:approve_moderation', args=(self.revision.id, )))
# Check that the user was redirected to the dashboard
self.assertRedirects(response, reverse('wagtailadmin_home'))
page = Page.objects.get(id=self.page.id)
# Page must be live
self.assertTrue(page.live, "Approving moderation failed to set live=True")
# Page should now have no unpublished changes
self.assertFalse(
page.has_unpublished_changes,
"Approving moderation failed to set has_unpublished_changes=False"
)
# Check that the page_published signal was fired
self.assertEqual(mock_handler.call_count, 1)
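        # mock_calls[0][2] is the keyword-arguments dict of the first call to the handler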
mock_call = mock_handler.mock_calls[0][2]
self.assertEqual(mock_call['sender'], self.page.specific_class)
self.assertEqual(mock_call['instance'], self.page)
self.assertIsInstance(mock_call['instance'], self.page.specific_class)
def test_approve_moderation_when_later_revision_exists(self):
self.page.title = "Goodbye world!"
self.page.save_revision(user=self.submitter, submitted_for_moderation=False)
response = self.client.post(reverse('wagtailadmin_pages:approve_moderation', args=(self.revision.id, )))
# Check that the user was redirected to the dashboard
self.assertRedirects(response, reverse('wagtailadmin_home'))
page = Page.objects.get(id=self.page.id)
# Page must be live
self.assertTrue(page.live, "Approving moderation failed to set live=True")
        # Page content should be that of the approved revision, not the later draft
self.assertEqual(page.title, "Hello world!")
# Page should still have unpublished changes
self.assertTrue(
page.has_unpublished_changes,
"has_unpublished_changes incorrectly cleared on approve_moderation when a later revision exists"
)
def test_approve_moderation_view_bad_revision_id(self):
"""
This tests that the approve moderation view handles invalid revision ids correctly
"""
# Post
response = self.client.post(reverse('wagtailadmin_pages:approve_moderation', args=(12345, )))
# Check that the user received a 404 response
self.assertEqual(response.status_code, 404)
def test_approve_moderation_view_bad_permissions(self):
"""
This tests that the approve moderation view doesn't allow users without moderation permissions
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Post
response = self.client.post(reverse('wagtailadmin_pages:approve_moderation', args=(self.revision.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_reject_moderation_view(self):
"""
This posts to the reject moderation view and checks that the page was rejected
"""
# Post
response = self.client.post(reverse('wagtailadmin_pages:reject_moderation', args=(self.revision.id, )))
# Check that the user was redirected to the dashboard
self.assertRedirects(response, reverse('wagtailadmin_home'))
# Page must not be live
self.assertFalse(Page.objects.get(id=self.page.id).live)
# Revision must no longer be submitted for moderation
self.assertFalse(PageRevision.objects.get(id=self.revision.id).submitted_for_moderation)
def test_reject_moderation_view_bad_revision_id(self):
"""
This tests that the reject moderation view handles invalid revision ids correctly
"""
# Post
response = self.client.post(reverse('wagtailadmin_pages:reject_moderation', args=(12345, )))
# Check that the user received a 404 response
self.assertEqual(response.status_code, 404)
def test_reject_moderation_view_bad_permissions(self):
"""
This tests that the reject moderation view doesn't allow users without moderation permissions
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Post
response = self.client.post(reverse('wagtailadmin_pages:reject_moderation', args=(self.revision.id, )))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_preview_for_moderation(self):
response = self.client.get(reverse('wagtailadmin_pages:preview_for_moderation', args=(self.revision.id, )))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'tests/simple_page.html')
self.assertContains(response, "Hello world!")
class TestContentTypeUse(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.user = self.login()
def test_content_type_use(self):
# Get use of event page
response = self.client.get(reverse('wagtailadmin_pages:type_use', args=('tests', 'eventpage')))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/content_type_use.html')
self.assertContains(response, "Christmas")
class TestSubpageBusinessRules(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add standard page (allows subpages of any type)
self.standard_index = StandardIndex()
self.standard_index.title = "Standard Index"
self.standard_index.slug = "standard-index"
self.root_page.add_child(instance=self.standard_index)
# Add business page (allows BusinessChild and BusinessSubIndex as subpages)
self.business_index = BusinessIndex()
self.business_index.title = "Business Index"
self.business_index.slug = "business-index"
self.root_page.add_child(instance=self.business_index)
# Add business child (allows no subpages)
self.business_child = BusinessChild()
self.business_child.title = "Business Child"
self.business_child.slug = "business-child"
self.business_index.add_child(instance=self.business_child)
# Add business subindex (allows only BusinessChild as subpages)
self.business_subindex = BusinessSubIndex()
self.business_subindex.title = "Business Subindex"
self.business_subindex.slug = "business-subindex"
self.business_index.add_child(instance=self.business_subindex)
# Login
self.login()
def test_standard_subpage(self):
add_subpage_url = reverse('wagtailadmin_pages:add_subpage', args=(self.standard_index.id, ))
# explorer should contain a link to 'add child page'
response = self.client.get(reverse('wagtailadmin_explore', args=(self.standard_index.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, add_subpage_url)
# add_subpage should give us choices of StandardChild, and BusinessIndex.
# BusinessSubIndex and BusinessChild are not allowed
response = self.client.get(add_subpage_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, StandardChild.get_verbose_name())
self.assertContains(response, BusinessIndex.get_verbose_name())
self.assertNotContains(response, BusinessSubIndex.get_verbose_name())
self.assertNotContains(response, BusinessChild.get_verbose_name())
def test_business_subpage(self):
add_subpage_url = reverse('wagtailadmin_pages:add_subpage', args=(self.business_index.id, ))
# explorer should contain a link to 'add child page'
response = self.client.get(reverse('wagtailadmin_explore', args=(self.business_index.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, add_subpage_url)
# add_subpage should give us a cut-down set of page types to choose
response = self.client.get(add_subpage_url)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, StandardIndex.get_verbose_name())
self.assertNotContains(response, StandardChild.get_verbose_name())
self.assertContains(response, BusinessSubIndex.get_verbose_name())
self.assertContains(response, BusinessChild.get_verbose_name())
def test_business_child_subpage(self):
add_subpage_url = reverse('wagtailadmin_pages:add_subpage', args=(self.business_child.id, ))
# explorer should not contain a link to 'add child page', as this page doesn't accept subpages
response = self.client.get(reverse('wagtailadmin_explore', args=(self.business_child.id, )))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, add_subpage_url)
# this also means that fetching add_subpage is blocked at the permission-check level
response = self.client.get(reverse('wagtailadmin_pages:add_subpage', args=(self.business_child.id, )))
self.assertEqual(response.status_code, 403)
def test_cannot_add_invalid_subpage_type(self):
# cannot add StandardChild as a child of BusinessIndex, as StandardChild is not present in subpage_types
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'standardchild', self.business_index.id))
)
self.assertEqual(response.status_code, 403)
# likewise for BusinessChild which has an empty subpage_types list
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'standardchild', self.business_child.id))
)
self.assertEqual(response.status_code, 403)
        # cannot add BusinessChild to StandardIndex, as BusinessChild restricts its parent page types
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'businesschild', self.standard_index.id))
)
self.assertEqual(response.status_code, 403)
# but we can add a BusinessChild to BusinessIndex
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'businesschild', self.business_index.id))
)
self.assertEqual(response.status_code, 200)
def test_not_prompted_for_page_type_when_only_one_choice(self):
response = self.client.get(reverse('wagtailadmin_pages:add_subpage', args=(self.business_subindex.id, )))
# BusinessChild is the only valid subpage type of BusinessSubIndex, so redirect straight there
self.assertRedirects(
response, reverse('wagtailadmin_pages:add', args=('tests', 'businesschild', self.business_subindex.id))
)
class TestNotificationPreferences(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Login
self.user = self.login()
# Create two moderator users for testing 'submitted' email
User = get_user_model()
self.moderator = User.objects.create_superuser('moderator', 'moderator@email.com', 'password')
self.moderator2 = User.objects.create_superuser('moderator2', 'moderator2@email.com', 'password')
# Create a submitter for testing 'rejected' and 'approved' emails
self.submitter = User.objects.create_user('submitter', 'submitter@email.com', 'password')
# User profiles for moderator2 and the submitter
self.moderator2_profile = UserProfile.get_for_user(self.moderator2)
self.submitter_profile = UserProfile.get_for_user(self.submitter)
# Create a page and submit it for moderation
self.child_page = SimplePage(
title="Hello world!",
slug='hello-world',
content="hello",
live=False,
)
self.root_page.add_child(instance=self.child_page)
# POST data to edit the page
self.post_data = {
'title': "I've been edited!",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
def submit(self):
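        """
        Submits the child_page for moderation by posting to the edit view
        """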
return self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), self.post_data)
def silent_submit(self):
"""
Sets up the child_page as needing moderation, without making a request
"""
self.child_page.save_revision(user=self.submitter, submitted_for_moderation=True)
self.revision = self.child_page.get_latest_revision()
def approve(self):
return self.client.post(reverse('wagtailadmin_pages:approve_moderation', args=(self.revision.id, )))
def reject(self):
return self.client.post(reverse('wagtailadmin_pages:reject_moderation', args=(self.revision.id, )))
def test_vanilla_profile(self):
# Check that the vanilla profile has rejected notifications on
self.assertEqual(self.submitter_profile.rejected_notifications, True)
# Check that the vanilla profile has approved notifications on
self.assertEqual(self.submitter_profile.approved_notifications, True)
def test_submit_notifications_sent(self):
# Submit
self.submit()
# Check that both the moderators got an email, and no others
self.assertEqual(len(mail.outbox), 2)
email_to = mail.outbox[0].to + mail.outbox[1].to
self.assertIn(self.moderator.email, email_to)
self.assertIn(self.moderator2.email, email_to)
self.assertEqual(len(mail.outbox[0].to), 1)
self.assertEqual(len(mail.outbox[1].to), 1)
def test_submit_notification_preferences_respected(self):
# moderator2 doesn't want emails
self.moderator2_profile.submitted_notifications = False
self.moderator2_profile.save()
# Submit
self.submit()
# Check that only one moderator got an email
self.assertEqual(len(mail.outbox), 1)
self.assertEqual([self.moderator.email], mail.outbox[0].to)
def test_approved_notifications(self):
# Set up the page version
self.silent_submit()
# Approve
self.approve()
# Submitter must receive an approved email
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['submitter@email.com'])
self.assertEqual(mail.outbox[0].subject, 'The page "Hello world!" has been approved')
def test_approved_notifications_preferences_respected(self):
# Submitter doesn't want 'approved' emails
self.submitter_profile.approved_notifications = False
self.submitter_profile.save()
# Set up the page version
self.silent_submit()
# Approve
self.approve()
# No email to send
self.assertEqual(len(mail.outbox), 0)
def test_rejected_notifications(self):
# Set up the page version
self.silent_submit()
# Reject
self.reject()
# Submitter must receive a rejected email
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['submitter@email.com'])
self.assertEqual(mail.outbox[0].subject, 'The page "Hello world!" has been rejected')
def test_rejected_notification_preferences_respected(self):
# Submitter doesn't want 'rejected' emails
self.submitter_profile.rejected_notifications = False
self.submitter_profile.save()
# Set up the page version
self.silent_submit()
# Reject
self.reject()
# No email to send
self.assertEqual(len(mail.outbox), 0)
def test_moderator_group_notifications(self):
# Create a (non-superuser) moderator
User = get_user_model()
user1 = User.objects.create_user('moduser1', 'moduser1@email.com')
user1.groups.add(Group.objects.get(name='Moderators'))
user1.save()
# Create another group and user with permission to moderate
modgroup2 = Group.objects.create(name='More moderators')
GroupPagePermission.objects.create(
group=modgroup2, page=self.root_page, permission_type='publish'
)
user2 = User.objects.create_user('moduser2', 'moduser2@email.com')
        user2.groups.add(modgroup2)
user2.save()
# Submit
# This used to break in Wagtail 1.3 (Postgres exception, SQLite 3/4 notifications)
response = self.submit()
# Should be redirected to explorer page
self.assertEqual(response.status_code, 302)
# Check that the superusers and the moderation group members all got an email
expected_emails = 4
self.assertEqual(len(mail.outbox), expected_emails)
email_to = []
for i in range(expected_emails):
self.assertEqual(len(mail.outbox[i].to), 1)
email_to += mail.outbox[i].to
self.assertIn(self.moderator.email, email_to)
self.assertIn(self.moderator2.email, email_to)
self.assertIn(user1.email, email_to)
self.assertIn(user2.email, email_to)
@override_settings(WAGTAILADMIN_NOTIFICATION_INCLUDE_SUPERUSERS=False)
def test_disable_superuser_notification(self):
# Add one of the superusers to the moderator group
self.moderator.groups.add(Group.objects.get(name='Moderators'))
response = self.submit()
# Should be redirected to explorer page
self.assertEqual(response.status_code, 302)
# Check that the non-moderator superuser is not being notified
expected_emails = 1
self.assertEqual(len(mail.outbox), expected_emails)
# Use chain as the 'to' field is a list of recipients
email_to = list(chain.from_iterable([m.to for m in mail.outbox]))
self.assertIn(self.moderator.email, email_to)
self.assertNotIn(self.moderator2.email, email_to)
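    # Simulate a failing mail backend by making django_send_mail raise IOError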
@mock.patch('wagtail.admin.utils.django_send_mail', side_effect=IOError('Server down'))
def test_email_send_error(self, mock_fn):
logging.disable(logging.CRITICAL)
# Approve
self.silent_submit()
response = self.approve()
logging.disable(logging.NOTSET)
# An email that fails to send should return a message rather than crash the page
self.assertEqual(response.status_code, 302)
response = self.client.get(reverse('wagtailadmin_home'))
# There should be one "approved" message and one "failed to send notifications"
messages = list(response.context['messages'])
self.assertEqual(len(messages), 2)
self.assertEqual(messages[0].level, message_constants.SUCCESS)
self.assertEqual(messages[1].level, message_constants.ERROR)
class TestLocking(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Login
self.user = self.login()
# Create a page and submit it for moderation
self.child_page = SimplePage(
title="Hello world!",
slug='hello-world',
content="hello",
live=False,
)
self.root_page.add_child(instance=self.child_page)
def test_lock_post(self):
response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))
# Check response
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page is locked
self.assertTrue(Page.objects.get(id=self.child_page.id).locked)
def test_lock_get(self):
response = self.client.get(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))
# Check response
self.assertEqual(response.status_code, 405)
# Check that the page is still unlocked
self.assertFalse(Page.objects.get(id=self.child_page.id).locked)
def test_lock_post_already_locked(self):
# Lock the page
self.child_page.locked = True
self.child_page.save()
response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))
# Check response
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page is still locked
self.assertTrue(Page.objects.get(id=self.child_page.id).locked)
def test_lock_post_with_good_redirect(self):
response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )), {
'next': reverse('wagtailadmin_pages:edit', args=(self.child_page.id, ))
})
# Check response
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
# Check that the page is locked
self.assertTrue(Page.objects.get(id=self.child_page.id).locked)
def test_lock_post_with_bad_redirect(self):
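        # An off-site 'next' URL should be ignored in favour of the default explorer redirect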
response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )), {
'next': 'http://www.google.co.uk'
})
# Check response
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page is locked
self.assertTrue(Page.objects.get(id=self.child_page.id).locked)
def test_lock_post_bad_page(self):
response = self.client.post(reverse('wagtailadmin_pages:lock', args=(9999, )))
# Check response
self.assertEqual(response.status_code, 404)
# Check that the page is still unlocked
self.assertFalse(Page.objects.get(id=self.child_page.id).locked)
def test_lock_post_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))
# Check response
self.assertEqual(response.status_code, 403)
# Check that the page is still unlocked
self.assertFalse(Page.objects.get(id=self.child_page.id).locked)
def test_unlock_post(self):
# Lock the page
self.child_page.locked = True
self.child_page.save()
response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))
# Check response
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page is unlocked
self.assertFalse(Page.objects.get(id=self.child_page.id).locked)
def test_unlock_get(self):
# Lock the page
self.child_page.locked = True
self.child_page.save()
response = self.client.get(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))
# Check response
self.assertEqual(response.status_code, 405)
# Check that the page is still locked
self.assertTrue(Page.objects.get(id=self.child_page.id).locked)
def test_unlock_post_already_unlocked(self):
response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))
# Check response
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page is still unlocked
self.assertFalse(Page.objects.get(id=self.child_page.id).locked)
def test_unlock_post_with_good_redirect(self):
# Lock the page
self.child_page.locked = True
self.child_page.save()
response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )), {
'next': reverse('wagtailadmin_pages:edit', args=(self.child_page.id, ))
})
# Check response
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
# Check that the page is unlocked
self.assertFalse(Page.objects.get(id=self.child_page.id).locked)
def test_unlock_post_with_bad_redirect(self):
# Lock the page
self.child_page.locked = True
self.child_page.save()
response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )), {
'next': 'http://www.google.co.uk'
})
# Check response
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that the page is unlocked
self.assertFalse(Page.objects.get(id=self.child_page.id).locked)
def test_unlock_post_bad_page(self):
# Lock the page
self.child_page.locked = True
self.child_page.save()
response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(9999, )))
# Check response
self.assertEqual(response.status_code, 404)
# Check that the page is still locked
self.assertTrue(Page.objects.get(id=self.child_page.id).locked)
def test_unlock_post_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Lock the page
self.child_page.locked = True
self.child_page.save()
response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))
# Check response
self.assertEqual(response.status_code, 403)
# Check that the page is still locked
self.assertTrue(Page.objects.get(id=self.child_page.id).locked)
class TestIssue197(TestCase, WagtailTestUtils):
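    # Regression test: tags entered on the page edit form must be saved when
    # the page is published (see issue 197)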
def test_issue_197(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Create a tagged page with no tags
self.tagged_page = self.root_page.add_child(instance=TaggedPage(
title="Tagged page",
slug='tagged-page',
live=False,
))
# Login
self.user = self.login()
# Add some tags and publish using edit view
post_data = {
'title': "Tagged page",
'slug': 'tagged-page',
'tags': "hello, world",
'action-publish': "Publish",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.tagged_page.id, )), post_data)
# Should be redirected to explorer
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Check that both tags are in the pages tag set
page = TaggedPage.objects.get(id=self.tagged_page.id)
self.assertIn('hello', page.tags.slugs())
self.assertIn('world', page.tags.slugs())
class TestChildRelationsOnSuperclass(TestCase, WagtailTestUtils):
# In our test models we define AdvertPlacement as a child relation on the Page model.
# Here we check that this behaves correctly when exposed on the edit form of a Page
# subclass (StandardIndex here).
fixtures = ['test.json']
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
self.test_advert = Advert.objects.get(id=1)
# Add child page
self.index_page = StandardIndex(
title="My lovely index",
slug="my-lovely-index",
advert_placements=[AdvertPlacement(advert=self.test_advert)]
)
self.root_page.add_child(instance=self.index_page)
# Login
self.login()
def test_get_create_form(self):
response = self.client.get(
reverse('wagtailadmin_pages:add', args=('tests', 'standardindex', self.root_page.id))
)
self.assertEqual(response.status_code, 200)
# Response should include an advert_placements formset labelled Adverts
self.assertContains(response, "Adverts")
self.assertContains(response, "id_advert_placements-TOTAL_FORMS")
def test_post_create_form(self):
post_data = {
'title': "New index!",
'slug': 'new-index',
'advert_placements-TOTAL_FORMS': '1',
'advert_placements-INITIAL_FORMS': '0',
'advert_placements-MAX_NUM_FORMS': '1000',
'advert_placements-0-advert': '1',
'advert_placements-0-colour': 'yellow',
'advert_placements-0-id': '',
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'standardindex', self.root_page.id)), post_data
)
# Find the page and check it
page = Page.objects.get(path__startswith=self.root_page.path, slug='new-index').specific
# Should be redirected to edit page
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(page.id, )))
self.assertEqual(page.advert_placements.count(), 1)
self.assertEqual(page.advert_placements.first().advert.text, 'test_advert')
def test_post_create_form_with_validation_error_in_formset(self):
post_data = {
'title': "New index!",
'slug': 'new-index',
'advert_placements-TOTAL_FORMS': '1',
'advert_placements-INITIAL_FORMS': '0',
'advert_placements-MAX_NUM_FORMS': '1000',
'advert_placements-0-advert': '1',
'advert_placements-0-colour': '', # should fail as colour is a required field
'advert_placements-0-id': '',
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'standardindex', self.root_page.id)), post_data
)
# Should remain on the edit page with a validation error
self.assertEqual(response.status_code, 200)
self.assertContains(response, "This field is required.")
# form should be marked as having unsaved changes
self.assertContains(response, "alwaysDirty: true")
def test_get_edit_form(self):
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.index_page.id, )))
self.assertEqual(response.status_code, 200)
# Response should include an advert_placements formset labelled Adverts
self.assertContains(response, "Adverts")
self.assertContains(response, "id_advert_placements-TOTAL_FORMS")
# the formset should be populated with an existing form
self.assertContains(response, "id_advert_placements-0-advert")
self.assertContains(
response, '<option value="1" selected="selected">test_advert</option>', html=True
)
def test_post_edit_form(self):
post_data = {
'title': "My lovely index",
'slug': 'my-lovely-index',
'advert_placements-TOTAL_FORMS': '2',
'advert_placements-INITIAL_FORMS': '1',
'advert_placements-MAX_NUM_FORMS': '1000',
'advert_placements-0-advert': '1',
'advert_placements-0-colour': 'yellow',
'advert_placements-0-id': self.index_page.advert_placements.first().id,
'advert_placements-1-advert': '1',
'advert_placements-1-colour': 'purple',
'advert_placements-1-id': '',
'action-publish': "Publish",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.index_page.id, )), post_data)
# Should be redirected to explorer
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))
# Find the page and check it
page = Page.objects.get(id=self.index_page.id).specific
self.assertEqual(page.advert_placements.count(), 2)
self.assertEqual(page.advert_placements.all()[0].advert.text, 'test_advert')
self.assertEqual(page.advert_placements.all()[1].advert.text, 'test_advert')
def test_post_edit_form_with_validation_error_in_formset(self):
post_data = {
'title': "My lovely index",
'slug': 'my-lovely-index',
'advert_placements-TOTAL_FORMS': '1',
'advert_placements-INITIAL_FORMS': '1',
'advert_placements-MAX_NUM_FORMS': '1000',
'advert_placements-0-advert': '1',
'advert_placements-0-colour': '',
'advert_placements-0-id': self.index_page.advert_placements.first().id,
'action-publish': "Publish",
}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.index_page.id, )), post_data)
# Should remain on the edit page with a validation error
self.assertEqual(response.status_code, 200)
self.assertContains(response, "This field is required.")
# form should be marked as having unsaved changes
self.assertContains(response, "alwaysDirty: true")
class TestRevisions(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.christmas_event = EventPage.objects.get(url_path='/home/events/christmas/')
self.christmas_event.title = "Last Christmas"
self.christmas_event.date_from = '2013-12-25'
self.christmas_event.body = (
"<p>Last Christmas I gave you my heart, "
"but the very next day you gave it away</p>"
)
self.last_christmas_revision = self.christmas_event.save_revision()
self.last_christmas_revision.created_at = local_datetime(2013, 12, 25)
self.last_christmas_revision.save()
self.christmas_event.title = "This Christmas"
self.christmas_event.date_from = '2014-12-25'
self.christmas_event.body = (
"<p>This year, to save me from tears, "
"I'll give it to someone special</p>"
)
self.this_christmas_revision = self.christmas_event.save_revision()
self.this_christmas_revision.created_at = local_datetime(2014, 12, 25)
self.this_christmas_revision.save()
self.login()
def test_edit_form_has_revisions_link(self):
response = self.client.get(
reverse('wagtailadmin_pages:edit', args=(self.christmas_event.id, ))
)
self.assertEqual(response.status_code, 200)
revisions_index_url = reverse(
'wagtailadmin_pages:revisions_index', args=(self.christmas_event.id, )
)
self.assertContains(response, revisions_index_url)
def test_get_revisions_index(self):
response = self.client.get(
reverse('wagtailadmin_pages:revisions_index', args=(self.christmas_event.id, ))
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, formats.localize(parse_date('2013-12-25')))
last_christmas_preview_url = reverse(
'wagtailadmin_pages:revisions_view',
args=(self.christmas_event.id, self.last_christmas_revision.id)
)
last_christmas_revert_url = reverse(
'wagtailadmin_pages:revisions_revert',
args=(self.christmas_event.id, self.last_christmas_revision.id)
)
self.assertContains(response, last_christmas_preview_url)
self.assertContains(response, last_christmas_revert_url)
self.assertContains(response, formats.localize(local_datetime(2014, 12, 25)))
this_christmas_preview_url = reverse(
'wagtailadmin_pages:revisions_view',
args=(self.christmas_event.id, self.this_christmas_revision.id)
)
this_christmas_revert_url = reverse(
'wagtailadmin_pages:revisions_revert',
args=(self.christmas_event.id, self.this_christmas_revision.id)
)
self.assertContains(response, this_christmas_preview_url)
self.assertContains(response, this_christmas_revert_url)
def test_preview_revision(self):
last_christmas_preview_url = reverse(
'wagtailadmin_pages:revisions_view',
args=(self.christmas_event.id, self.last_christmas_revision.id)
)
response = self.client.get(last_christmas_preview_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Last Christmas I gave you my heart")
def test_revert_revision(self):
last_christmas_preview_url = reverse(
'wagtailadmin_pages:revisions_revert',
args=(self.christmas_event.id, self.last_christmas_revision.id)
)
response = self.client.get(last_christmas_preview_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Editing Event page")
self.assertContains(response, "You are viewing a previous revision of this page")
# Form should show the content of the revision, not the current draft
self.assertContains(response, "Last Christmas I gave you my heart")
# Form should include a hidden 'revision' field
revision_field = (
"""<input type="hidden" name="revision" value="%d" />""" %
self.last_christmas_revision.id
)
self.assertContains(response, revision_field)
# Buttons should be relabelled
self.assertContains(response, "Replace current draft")
self.assertContains(response, "Publish this revision")
def test_scheduled_revision(self):
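        # Publish the older revision, then schedule the newer one for a later go-live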
self.last_christmas_revision.publish()
self.this_christmas_revision.approved_go_live_at = local_datetime(2014, 12, 26)
self.this_christmas_revision.save()
this_christmas_unschedule_url = reverse(
'wagtailadmin_pages:revisions_unschedule',
args=(self.christmas_event.id, self.this_christmas_revision.id)
)
response = self.client.get(
reverse('wagtailadmin_pages:revisions_index', args=(self.christmas_event.id, ))
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Scheduled for')
self.assertContains(response, formats.localize(parse_date('2014-12-26')))
self.assertContains(response, this_christmas_unschedule_url)
class TestCompareRevisions(TestCase, WagtailTestUtils):
# Actual tests for the comparison classes can be found in test_compare.py
fixtures = ['test.json']
def setUp(self):
self.christmas_event = EventPage.objects.get(url_path='/home/events/christmas/')
self.christmas_event.title = "Last Christmas"
self.christmas_event.date_from = '2013-12-25'
self.christmas_event.body = (
"<p>Last Christmas I gave you my heart, "
"but the very next day you gave it away</p>"
)
self.last_christmas_revision = self.christmas_event.save_revision()
self.last_christmas_revision.created_at = local_datetime(2013, 12, 25)
self.last_christmas_revision.save()
self.christmas_event.title = "This Christmas"
self.christmas_event.date_from = '2014-12-25'
self.christmas_event.body = (
"<p>This year, to save me from tears, "
"I'll give it to someone special</p>"
)
self.this_christmas_revision = self.christmas_event.save_revision()
self.this_christmas_revision.created_at = local_datetime(2014, 12, 25)
self.this_christmas_revision.save()
self.login()
def test_compare_revisions(self):
compare_url = reverse(
'wagtailadmin_pages:revisions_compare',
args=(self.christmas_event.id, self.last_christmas_revision.id, self.this_christmas_revision.id)
)
response = self.client.get(compare_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<span class="deletion">Last Christmas I gave you my heart, but the very next day you gave it away</span><span class="addition">This year, to save me from tears, I'll give it to someone special</span>')
def test_compare_revisions_earliest(self):
compare_url = reverse(
'wagtailadmin_pages:revisions_compare',
args=(self.christmas_event.id, 'earliest', self.this_christmas_revision.id)
)
response = self.client.get(compare_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<span class="deletion">Last Christmas I gave you my heart, but the very next day you gave it away</span><span class="addition">This year, to save me from tears, I'll give it to someone special</span>')
def test_compare_revisions_latest(self):
compare_url = reverse(
'wagtailadmin_pages:revisions_compare',
args=(self.christmas_event.id, self.last_christmas_revision.id, 'latest')
)
response = self.client.get(compare_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<span class="deletion">Last Christmas I gave you my heart, but the very next day you gave it away</span><span class="addition">This year, to save me from tears, I'll give it to someone special</span>')
def test_compare_revisions_live(self):
# Mess with the live version, bypassing revisions
self.christmas_event.body = (
"<p>This year, to save me from tears, "
"I'll just feed it to the dog</p>"
)
self.christmas_event.save(update_fields=['body'])
compare_url = reverse(
'wagtailadmin_pages:revisions_compare',
args=(self.christmas_event.id, self.last_christmas_revision.id, 'live')
)
response = self.client.get(compare_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<span class="deletion">Last Christmas I gave you my heart, but the very next day you gave it away</span><span class="addition">This year, to save me from tears, I'll just feed it to the dog</span>')
class TestRevisionsUnschedule(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.christmas_event = EventPage.objects.get(url_path='/home/events/christmas/')
self.christmas_event.title = "Last Christmas"
self.christmas_event.date_from = '2013-12-25'
self.christmas_event.body = (
"<p>Last Christmas I gave you my heart, "
"but the very next day you gave it away</p>"
)
self.last_christmas_revision = self.christmas_event.save_revision()
self.last_christmas_revision.created_at = local_datetime(2013, 12, 25)
self.last_christmas_revision.save()
self.last_christmas_revision.publish()
self.christmas_event.title = "This Christmas"
self.christmas_event.date_from = '2014-12-25'
self.christmas_event.body = (
"<p>This year, to save me from tears, "
"I'll give it to someone special</p>"
)
self.this_christmas_revision = self.christmas_event.save_revision()
self.this_christmas_revision.created_at = local_datetime(2014, 12, 24)
self.this_christmas_revision.save()
self.this_christmas_revision.approved_go_live_at = local_datetime(2014, 12, 25)
self.this_christmas_revision.save()
self.user = self.login()
def test_unschedule_view(self):
"""
This tests that the unschedule view responds with a confirm page
"""
response = self.client.get(reverse('wagtailadmin_pages:revisions_unschedule', args=(self.christmas_event.id, self.this_christmas_revision.id)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/revisions/confirm_unschedule.html')
def test_unschedule_view_invalid_page_id(self):
"""
This tests that the unschedule view returns an error if the page id is invalid
"""
# Get unschedule page
response = self.client.get(reverse('wagtailadmin_pages:revisions_unschedule', args=(12345, 67894)))
# Check that the user received a 404 response
self.assertEqual(response.status_code, 404)
def test_unschedule_view_invalid_revision_id(self):
"""
        This tests that the unschedule view returns an error if the revision id is invalid
"""
# Get unschedule page
response = self.client.get(reverse('wagtailadmin_pages:revisions_unschedule', args=(self.christmas_event.id, 67894)))
# Check that the user received a 404 response
self.assertEqual(response.status_code, 404)
def test_unschedule_view_bad_permissions(self):
"""
This tests that the unschedule view doesn't allow users without publish permissions
"""
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get unschedule page
response = self.client.get(reverse('wagtailadmin_pages:revisions_unschedule', args=(self.christmas_event.id, self.this_christmas_revision.id)))
# Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_unschedule_view_post(self):
"""
This posts to the unschedule view and checks that the revision was unscheduled
"""
# Post to the unschedule page
response = self.client.post(reverse('wagtailadmin_pages:revisions_unschedule', args=(self.christmas_event.id, self.this_christmas_revision.id)))
# Should be redirected to revisions index page
self.assertRedirects(response, reverse('wagtailadmin_pages:revisions_index', args=(self.christmas_event.id, )))
# Check that the page has no approved_schedule
self.assertFalse(EventPage.objects.get(id=self.christmas_event.id).approved_schedule)
# Check that the approved_go_live_at has been cleared from the revision
self.assertIsNone(self.christmas_event.revisions.get(id=self.this_christmas_revision.id).approved_go_live_at)
class TestIssue2599(TestCase, WagtailTestUtils):
"""
When previewing a page on creation, we need to assign it a path value consistent with its
(future) position in the tree. The naive way of doing this is to give it an index number
one more than numchild - however, index numbers are not reassigned on page deletion, so
this can result in a path that collides with an existing page (which is invalid).
"""
def test_issue_2599(self):
homepage = Page.objects.get(id=2)
child1 = Page(title='child1')
homepage.add_child(instance=child1)
child2 = Page(title='child2')
homepage.add_child(instance=child2)
child1.delete()
self.login()
post_data = {
'title': "New page!",
'content': "Some content",
'slug': 'hello-world',
'action-submit': "Submit",
}
preview_url = reverse('wagtailadmin_pages:preview_on_add',
args=('tests', 'simplepage', homepage.id))
response = self.client.post(preview_url, post_data)
# Check the JSON response
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(response.content.decode(), {'is_valid': True})
response = self.client.get(preview_url)
# Check the HTML response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'tests/simple_page.html')
self.assertContains(response, "New page!")
# Check that the treebeard attributes were set correctly on the page object
self.assertEqual(response.context['self'].depth, homepage.depth + 1)
self.assertTrue(response.context['self'].path.startswith(homepage.path))
self.assertEqual(response.context['self'].get_parent(), homepage)
class TestIssue2492(TestCase, WagtailTestUtils):
"""
The publication submission message generation was performed using
the Page class, as opposed to the specific_class for that Page.
This test ensures that the specific_class url method is called
when the 'view live' message button is created.
"""
def setUp(self):
self.root_page = Page.objects.get(id=2)
child_page = SingleEventPage(
title="Test Event", slug="test-event", location="test location",
cost="10", date_from=datetime.datetime.now(),
audience=EVENT_AUDIENCE_CHOICES[0][0])
self.root_page.add_child(instance=child_page)
child_page.save_revision().publish()
self.child_page = SingleEventPage.objects.get(id=child_page.id)
self.user = self.login()
def test_page_edit_post_publish_url(self):
post_data = {
'action-publish': "Publish",
'title': self.child_page.title,
'date_from': self.child_page.date_from,
'slug': self.child_page.slug,
'audience': self.child_page.audience,
'location': self.child_page.location,
'cost': self.child_page.cost,
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )),
post_data, follow=True)
# Grab a fresh copy's URL
new_url = SingleEventPage.objects.get(id=self.child_page.id).url
# The "View Live" button should have the custom URL.
for message in response.context['messages']:
self.assertIn('"{}"'.format(new_url), message.message)
break
class TestIssue3982(TestCase, WagtailTestUtils):
"""
Pages that are not associated with a site, and thus do not have a live URL,
should not display a "View live" link in the flash message after being
edited.
"""
def setUp(self):
super().setUp()
self.login()
def _create_page(self, parent):
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', parent.pk)),
{'title': "Hello, world!", 'content': "Some content", 'slug': 'hello-world', 'action-publish': "publish"},
follow=True)
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(parent.pk,)))
page = SimplePage.objects.get()
self.assertTrue(page.live)
return response, page
def test_create_accessible(self):
"""
Create a page under the site root, check the flash message has a valid
"View live" button.
"""
response, page = self._create_page(Page.objects.get(pk=2))
self.assertIsNotNone(page.url)
self.assertTrue(any(
'View live' in message.message and page.url in message.message
for message in response.context['messages']))
def test_create_inaccessible(self):
"""
Create a page outside of the site root, check the flash message does
not have a "View live" button.
"""
response, page = self._create_page(Page.objects.get(pk=1))
self.assertIsNone(page.url)
self.assertFalse(any(
'View live' in message.message
for message in response.context['messages']))
def _edit_page(self, parent):
page = parent.add_child(instance=SimplePage(title='Hello, world!', content='Some content'))
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(page.pk,)),
{'title': "Hello, world!", 'content': "Some content", 'slug': 'hello-world', 'action-publish': "publish"},
follow=True)
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(parent.pk,)))
page = SimplePage.objects.get(pk=page.pk)
self.assertTrue(page.live)
return response, page
def test_edit_accessible(self):
"""
Edit a page under the site root, check the flash message has a valid
"View live" button.
"""
response, page = self._edit_page(Page.objects.get(pk=2))
self.assertIsNotNone(page.url)
self.assertTrue(any(
'View live' in message.message and page.url in message.message
for message in response.context['messages']))
def test_edit_inaccessible(self):
"""
Edit a page outside of the site root, check the flash message does
not have a "View live" button.
"""
response, page = self._edit_page(Page.objects.get(pk=1))
self.assertIsNone(page.url)
self.assertFalse(any(
'View live' in message.message
for message in response.context['messages']))
def _approve_page(self, parent):
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', parent.pk)),
{'title': "Hello, world!", 'content': "Some content", 'slug': 'hello-world', 'action-submit': "submit"},
follow=True)
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(parent.pk,)))
page = SimplePage.objects.get()
self.assertFalse(page.live)
revision = PageRevision.objects.get(page=page)
response = self.client.post(reverse('wagtailadmin_pages:approve_moderation', args=(revision.pk,)), follow=True)
page = SimplePage.objects.get()
self.assertTrue(page.live)
self.assertRedirects(response, reverse('wagtailadmin_home'))
return response, page
def test_approve_accessible(self):
"""
Edit a page under the site root, check the flash message has a valid
"View live" button.
"""
response, page = self._approve_page(Page.objects.get(pk=2))
self.assertIsNotNone(page.url)
self.assertTrue(any(
'View live' in message.message and page.url in message.message
for message in response.context['messages']))
def test_approve_inaccessible(self):
"""
Edit a page outside of the site root, check the flash message does
not have a "View live" button.
"""
response, page = self._approve_page(Page.objects.get(pk=1))
self.assertIsNone(page.url)
self.assertFalse(any(
'View live' in message.message
for message in response.context['messages']))
class TestInlinePanelMedia(TestCase, WagtailTestUtils):
"""
Test that form media required by InlinePanels is correctly pulled in to the edit page
"""
def test_inline_panel_media(self):
homepage = Page.objects.get(id=2)
self.login()
# simplepage does not need hallo...
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', homepage.id)))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'wagtailadmin/js/hallo-bootstrap.js')
# but sectionedrichtextpage does
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'sectionedrichtextpage', homepage.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'wagtailadmin/js/hallo-bootstrap.js')
class TestInlineStreamField(TestCase, WagtailTestUtils):
"""
Test that streamfields inside an inline child work
"""
def test_inline_streamfield(self):
homepage = Page.objects.get(id=2)
self.login()
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'inlinestreampage', homepage.id)))
self.assertEqual(response.status_code, 200)
# response should include HTML declarations for streamfield child blocks
self.assertContains(response, '<li id="__PREFIX__-container" class="sequence-member">')
class TestRecentEditsPanel(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add child page
child_page = SimplePage(
title="Hello world!",
slug="hello-world",
content="Some content here",
)
self.root_page.add_child(instance=child_page)
child_page.save_revision().publish()
self.child_page = SimplePage.objects.get(id=child_page.id)
get_user_model().objects.create_superuser(username='alice', email='alice@email.com', password='password')
get_user_model().objects.create_superuser(username='bob', email='bob@email.com', password='password')
def change_something(self, title):
post_data = {'title': title, 'content': "Some content", 'slug': 'hello-world'}
response = self.client.post(reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )), post_data)
# Should be redirected to edit page
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))
# The page should have "has_unpublished_changes" flag set
child_page_new = SimplePage.objects.get(id=self.child_page.id)
self.assertTrue(child_page_new.has_unpublished_changes)
def go_to_dashboard_response(self):
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
return response
def test_your_recent_edits(self):
# Login as Bob
self.client.login(username='bob', password='password')
# Bob hasn't edited anything yet
response = self.client.get(reverse('wagtailadmin_home'))
self.assertNotIn('Your most recent edits', response.content.decode('utf-8'))
# Login as Alice
self.client.logout()
self.client.login(username='alice', password='password')
# Alice changes something
self.change_something("Alice's edit")
# Edit should show up on dashboard
response = self.go_to_dashboard_response()
self.assertIn('Your most recent edits', response.content.decode('utf-8'))
# Bob changes something
self.client.login(username='bob', password='password')
self.change_something("Bob's edit")
        # Edit shows up on Bob's dashboard
response = self.go_to_dashboard_response()
self.assertIn('Your most recent edits', response.content.decode('utf-8'))
# Login as Alice again
self.client.logout()
self.client.login(username='alice', password='password')
# Alice's dashboard should still list that first edit
response = self.go_to_dashboard_response()
self.assertIn('Your most recent edits', response.content.decode('utf-8'))
def test_panel(self):
"""Test if the panel actually returns expected pages """
self.client.login(username='bob', password='password')
# change a page
self.change_something("Bob's edit")
# set a user to 'mock' a request
self.client.user = get_user_model().objects.get(email='bob@email.com')
# get the panel to get the last edits
panel = RecentEditsPanel(self.client)
# check if the revision is the revision of edited Page
self.assertEqual(panel.last_edits[0][0].page, Page.objects.get(pk=self.child_page.id))
# check if the page in this list is the specific page of this revision
self.assertEqual(panel.last_edits[0][1], Page.objects.get(pk=self.child_page.id).specific)
class TestIssue2994(TestCase, WagtailTestUtils):
"""
In contrast to most "standard" form fields, StreamField form widgets generally won't
provide a postdata field with a name exactly matching the field name. To prevent Django
from wrongly interpreting this as the field being omitted from the form,
we need to provide a custom value_omitted_from_data method.
"""
def setUp(self):
self.root_page = Page.objects.get(id=2)
self.user = self.login()
def test_page_edit_post_publish_url(self):
# Post
post_data = {
'title': "Issue 2994 test",
'slug': 'issue-2994-test',
'body-count': '1',
'body-0-deleted': '',
'body-0-order': '0',
'body-0-type': 'text',
'body-0-value': 'hello world',
'action-publish': "Publish",
}
self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'defaultstreampage', self.root_page.id)), post_data
)
new_page = DefaultStreamPage.objects.get(slug='issue-2994-test')
self.assertEqual(1, len(new_page.body))
self.assertEqual('hello world', new_page.body[0].value)
class TestParentalM2M(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.events_index = Page.objects.get(url_path='/home/events/')
self.christmas_page = Page.objects.get(url_path='/home/events/christmas/')
self.user = self.login()
self.holiday_category = EventCategory.objects.create(name='Holiday')
self.men_with_beards_category = EventCategory.objects.create(name='Men with beards')
def test_create_and_save(self):
post_data = {
'title': "Presidents' Day",
'date_from': "2017-02-20",
'slug': "presidents-day",
'audience': "public",
'location': "America",
'cost': "$1",
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
'categories': [self.holiday_category.id, self.men_with_beards_category.id]
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'eventpage', self.events_index.id)),
post_data
)
created_page = EventPage.objects.get(url_path='/home/events/presidents-day/')
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(created_page.id, )))
created_revision = created_page.get_latest_revision_as_page()
self.assertIn(self.holiday_category, created_revision.categories.all())
self.assertIn(self.men_with_beards_category, created_revision.categories.all())
def test_create_and_publish(self):
post_data = {
'action-publish': "Publish",
'title': "Presidents' Day",
'date_from': "2017-02-20",
'slug': "presidents-day",
'audience': "public",
'location': "America",
'cost': "$1",
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
'categories': [self.holiday_category.id, self.men_with_beards_category.id]
}
response = self.client.post(
reverse('wagtailadmin_pages:add', args=('tests', 'eventpage', self.events_index.id)),
post_data
)
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.events_index.id, )))
created_page = EventPage.objects.get(url_path='/home/events/presidents-day/')
self.assertIn(self.holiday_category, created_page.categories.all())
self.assertIn(self.men_with_beards_category, created_page.categories.all())
def test_edit_and_save(self):
post_data = {
'title': "Christmas",
'date_from': "2017-12-25",
'slug': "christmas",
'audience': "public",
'location': "The North Pole",
'cost': "Free",
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
'categories': [self.holiday_category.id, self.men_with_beards_category.id]
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.christmas_page.id, )),
post_data
)
self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.christmas_page.id, )))
updated_page = EventPage.objects.get(id=self.christmas_page.id)
created_revision = updated_page.get_latest_revision_as_page()
self.assertIn(self.holiday_category, created_revision.categories.all())
self.assertIn(self.men_with_beards_category, created_revision.categories.all())
# no change to live page record yet
self.assertEqual(0, updated_page.categories.count())
def test_edit_and_publish(self):
post_data = {
'action-publish': "Publish",
'title': "Christmas",
'date_from': "2017-12-25",
'slug': "christmas",
'audience': "public",
'location': "The North Pole",
'cost': "Free",
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
'categories': [self.holiday_category.id, self.men_with_beards_category.id]
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.christmas_page.id, )),
post_data
)
self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.events_index.id, )))
updated_page = EventPage.objects.get(id=self.christmas_page.id)
self.assertEqual(2, updated_page.categories.count())
self.assertIn(self.holiday_category, updated_page.categories.all())
self.assertIn(self.men_with_beards_category, updated_page.categories.all())
class TestValidationErrorMessages(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.events_index = Page.objects.get(url_path='/home/events/')
self.christmas_page = Page.objects.get(url_path='/home/events/christmas/')
self.user = self.login()
def test_field_error(self):
"""Field errors should be shown against the relevant fields, not in the header message"""
post_data = {
'title': "",
'date_from': "2017-12-25",
'slug': "christmas",
'audience': "public",
'location': "The North Pole",
'cost': "Free",
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.christmas_page.id, )),
post_data
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "The page could not be saved due to validation errors")
# the error should only appear once: against the field, not in the header message
self.assertContains(response, """<p class="error-message"><span>This field is required.</span></p>""", count=1, html=True)
self.assertContains(response, "This field is required", count=1)
def test_non_field_error(self):
"""Non-field errors should be shown in the header message"""
post_data = {
'title': "Christmas",
'date_from': "2017-12-25",
'date_to': "2017-12-24",
'slug': "christmas",
'audience': "public",
'location': "The North Pole",
'cost': "Free",
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.christmas_page.id, )),
post_data
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "The page could not be saved due to validation errors")
self.assertContains(response, "<li>The end date must be after the start date</li>", count=1)
def test_field_and_non_field_error(self):
"""
If both field and non-field errors exist, all errors should be shown in the header message
with appropriate context to identify the field; and field errors should also be shown
against the relevant fields.
"""
post_data = {
'title': "",
'date_from': "2017-12-25",
'date_to': "2017-12-24",
'slug': "christmas",
'audience': "public",
'location': "The North Pole",
'cost': "Free",
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
}
response = self.client.post(
reverse('wagtailadmin_pages:edit', args=(self.christmas_page.id, )),
post_data
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "The page could not be saved due to validation errors")
self.assertContains(response, "<li>The end date must be after the start date</li>", count=1)
# Error on title shown against the title field
self.assertContains(response, """<p class="error-message"><span>This field is required.</span></p>""", count=1, html=True)
# Error on title shown in the header message
self.assertContains(response, "<li>Title: This field is required.</li>", count=1)
class TestDraftAccess(TestCase, WagtailTestUtils):
"""Tests for the draft view access restrictions."""
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add child page
self.child_page = SimplePage(
title="Hello world!",
slug="hello-world",
content="hello",
)
self.root_page.add_child(instance=self.child_page)
# create user with admin access (but not draft_view access)
user = get_user_model().objects.create_user(username='bob', email='bob@email.com', password='password')
user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
def test_draft_access_admin(self):
"""Test that admin can view draft."""
# Login as admin
self.user = self.login()
# Try getting page draft
response = self.client.get(reverse('wagtailadmin_pages:view_draft', args=(self.child_page.id, )))
# User can view
self.assertEqual(response.status_code, 200)
def test_draft_access_unauthorized(self):
"""Test that user without edit/publish permission can't view draft."""
self.assertTrue(self.client.login(username='bob', password='password'))
# Try getting page draft
response = self.client.get(reverse('wagtailadmin_pages:view_draft', args=(self.child_page.id, )))
        # User gets a 403 (Forbidden) response
self.assertEqual(response.status_code, 403)
def test_draft_access_authorized(self):
"""Test that user with edit permission can view draft."""
# give user the permission to edit page
user = get_user_model().objects.get(username='bob')
user.groups.add(Group.objects.get(name='Moderators'))
user.save()
self.assertTrue(self.client.login(username='bob', password='password'))
# Get add subpage page
response = self.client.get(reverse('wagtailadmin_pages:view_draft', args=(self.child_page.id, )))
# User can view
self.assertEqual(response.status_code, 200)
class TestPreview(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.meetings_category = EventCategory.objects.create(name='Meetings')
self.parties_category = EventCategory.objects.create(name='Parties')
self.holidays_category = EventCategory.objects.create(name='Holidays')
self.home_page = Page.objects.get(url_path='/home/')
self.event_page = Page.objects.get(url_path='/home/events/christmas/')
self.user = self.login()
self.post_data = {
'title': "Beach party",
'slug': 'beach-party',
'body': "party on wayne",
'date_from': '2017-08-01',
'audience': 'public',
'location': 'the beach',
'cost': 'six squid',
'carousel_items-TOTAL_FORMS': 0,
'carousel_items-INITIAL_FORMS': 0,
'carousel_items-MIN_NUM_FORMS': 0,
'carousel_items-MAX_NUM_FORMS': 0,
'speakers-TOTAL_FORMS': 0,
'speakers-INITIAL_FORMS': 0,
'speakers-MIN_NUM_FORMS': 0,
'speakers-MAX_NUM_FORMS': 0,
'related_links-TOTAL_FORMS': 0,
'related_links-INITIAL_FORMS': 0,
'related_links-MIN_NUM_FORMS': 0,
'related_links-MAX_NUM_FORMS': 0,
'head_counts-TOTAL_FORMS': 0,
'head_counts-INITIAL_FORMS': 0,
'head_counts-MIN_NUM_FORMS': 0,
'head_counts-MAX_NUM_FORMS': 0,
'categories': [self.parties_category.id, self.holidays_category.id],
}
def test_preview_on_create_with_m2m_field(self):
preview_url = reverse('wagtailadmin_pages:preview_on_add',
args=('tests', 'eventpage', self.home_page.id))
response = self.client.post(preview_url, self.post_data)
# Check the JSON response
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(response.content.decode(), {'is_valid': True})
response = self.client.get(preview_url)
# Check the HTML response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'tests/event_page.html')
self.assertContains(response, "Beach party")
self.assertContains(response, "<li>Parties</li>")
self.assertContains(response, "<li>Holidays</li>")
def test_preview_on_edit_with_m2m_field(self):
preview_url = reverse('wagtailadmin_pages:preview_on_edit',
args=(self.event_page.id,))
response = self.client.post(preview_url, self.post_data)
# Check the JSON response
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(response.content.decode(), {'is_valid': True})
response = self.client.get(preview_url)
# Check the HTML response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'tests/event_page.html')
self.assertContains(response, "Beach party")
self.assertContains(response, "<li>Parties</li>")
self.assertContains(response, "<li>Holidays</li>")
def test_preview_on_edit_expiry(self):
initial_datetime = timezone.now()
expiry_datetime = initial_datetime + datetime.timedelta(
seconds=PreviewOnEdit.preview_expiration_timeout + 1)
with freeze_time(initial_datetime) as frozen_datetime:
preview_url = reverse('wagtailadmin_pages:preview_on_edit',
args=(self.event_page.id,))
response = self.client.post(preview_url, self.post_data)
# Check the JSON response
self.assertEqual(response.status_code, 200)
response = self.client.get(preview_url)
# Check the HTML response
self.assertEqual(response.status_code, 200)
frozen_datetime.move_to(expiry_datetime)
preview_url = reverse('wagtailadmin_pages:preview_on_edit',
args=(self.home_page.id,))
response = self.client.post(preview_url, self.post_data)
self.assertEqual(response.status_code, 200)
response = self.client.get(preview_url)
self.assertEqual(response.status_code, 200)
| 42.458143 | 244 | 0.653543 |
793f02a33467af93eca32230eeaae944f518175d | 4,137 | py | Python | optapy-quickstarts/school-timetabling/constraints.py | kawael/optapy | f8721a57806c1527509716c63ab7c1baec4185af | ["Apache-2.0"] | 1 | 2021-10-08T13:38:56.000Z | 2021-10-08T13:38:56.000Z | optapy-quickstarts/school-timetabling/constraints.py | kawael/optapy | f8721a57806c1527509716c63ab7c1baec4185af | ["Apache-2.0"] | null | null | null | optapy-quickstarts/school-timetabling/constraints.py | kawael/optapy | f8721a57806c1527509716c63ab7c1baec4185af | ["Apache-2.0"] | null | null | null |
from optapy import constraint_provider, get_class
from optapy.types import Joiners, HardSoftScore
from domain import Lesson, Room
from datetime import datetime, date, timedelta
LessonClass = get_class(Lesson)
RoomClass = get_class(Room)
# Trick since timedelta only works with datetime instances
today = date.today()
def within_30_minutes(lesson1, lesson2):
between = datetime.combine(today, lesson1.timeslot.end_time) - datetime.combine(today, lesson2.timeslot.start_time)
return timedelta(minutes=0) <= between <= timedelta(minutes=30)
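# Example of the trick above (times are made up): time objects cannot be
# subtracted directly, so both are first combined with today's date;
# datetime.combine(today, time(10, 20)) - datetime.combine(today, time(10, 0))
# equals timedelta(minutes=20).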
@constraint_provider
def define_constraints(constraint_factory):
return [
# Hard constraints
room_conflict(constraint_factory),
teacher_conflict(constraint_factory),
student_group_conflict(constraint_factory),
# Soft constraints
teacher_room_stability(constraint_factory),
teacher_time_efficiency(constraint_factory),
student_group_subject_variety(constraint_factory)
]
def room_conflict(constraint_factory):
# A room can accommodate at most one lesson at the same time.
return constraint_factory \
.from_(LessonClass) \
.join(LessonClass,
[
# ... in the same timeslot ...
Joiners.equal(lambda lesson: lesson.timeslot),
# ... in the same room ...
Joiners.equal(lambda lesson: lesson.room),
# form unique pairs
Joiners.lessThan(lambda lesson: lesson.id)
]) \
.penalize("Room conflict", HardSoftScore.ONE_HARD)
def teacher_conflict(constraint_factory):
# A teacher can teach at most one lesson at the same time.
return constraint_factory \
.from_(LessonClass) \
.join(LessonClass,
[
Joiners.equal(lambda lesson: lesson.timeslot),
Joiners.equal(lambda lesson: lesson.teacher),
Joiners.lessThan(lambda lesson: lesson.id)
]) \
.penalize("Teacher conflict", HardSoftScore.ONE_HARD)
def student_group_conflict(constraint_factory):
# A student can attend at most one lesson at the same time.
return constraint_factory \
.from_(LessonClass) \
.join(LessonClass,
[
Joiners.equal(lambda lesson: lesson.timeslot),
Joiners.equal(lambda lesson: lesson.student_group),
Joiners.lessThan(lambda lesson: lesson.id)
]) \
.penalize("Student group conflict", HardSoftScore.ONE_HARD)
def teacher_room_stability(constraint_factory):
# A teacher prefers to teach in a single room.
return constraint_factory \
.from_(LessonClass) \
.join(LessonClass,
[
Joiners.equal(lambda lesson: lesson.teacher),
Joiners.lessThan(lambda lesson: lesson.id)
]) \
.filter(lambda lesson1, lesson2: lesson1.room != lesson2.room) \
.penalize("Teacher room stability", HardSoftScore.ONE_SOFT)
def teacher_time_efficiency(constraint_factory):
# A teacher prefers to teach sequential lessons and dislikes gaps between lessons.
return constraint_factory.from_(LessonClass) \
.join(LessonClass,
[
Joiners.equal(lambda lesson: lesson.teacher),
Joiners.equal(lambda lesson: lesson.timeslot.day_of_week)
]) \
.filter(within_30_minutes) \
.reward("Teacher time efficiency", HardSoftScore.ONE_SOFT)
def student_group_subject_variety(constraint_factory):
# A student group dislikes sequential lessons on the same subject.
return constraint_factory.from_(LessonClass) \
.join(LessonClass,
[
Joiners.equal(lambda lesson: lesson.subject),
Joiners.equal(lambda lesson: lesson.student_group),
Joiners.equal(lambda lesson: lesson.timeslot.day_of_week)
]) \
.filter(within_30_minutes) \
.penalize("Student group subject variety", HardSoftScore.ONE_SOFT)
| 37.609091 | 119 | 0.648779 |
793f039f572e854c10f687011b178d309d032292 | 158 | py | Python | web/app/face/admin.py | IlyaTorch/django-docker-ds | 91cff4efc0d219b88814ed140bcd3ac56ae2a0aa | ["MIT"] | null | null | null | web/app/face/admin.py | IlyaTorch/django-docker-ds | 91cff4efc0d219b88814ed140bcd3ac56ae2a0aa | ["MIT"] | null | null | null | web/app/face/admin.py | IlyaTorch/django-docker-ds | 91cff4efc0d219b88814ed140bcd3ac56ae2a0aa | ["MIT"] | null | null | null |
from django.contrib import admin
from .models import Url, BoundingBox
# Register your models here.
admin.site.register(Url)
admin.site.register(BoundingBox)
| 22.571429 | 36 | 0.810127 |
793f0573f2920d4263117743cfd5a11135a291f3 | 2,288 | py | Python | python3/koans/about_control_statements.py | theagoliveira/python_koans | e89436a16f412fef5ee448f2f5548728b43d6eca | ["MIT"] | null | null | null | python3/koans/about_control_statements.py | theagoliveira/python_koans | e89436a16f412fef5ee448f2f5548728b43d6eca | ["MIT"] | null | null | null | python3/koans/about_control_statements.py | theagoliveira/python_koans | e89436a16f412fef5ee448f2f5548728b43d6eca | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutControlStatements(Koan):
def test_if_then_else_statements(self):
if True:
result = "true value"
else:
result = "false value"
self.assertEqual("true value", result)
def test_if_then_statements(self):
result = "default value"
if True:
result = "true value"
self.assertEqual("true value", result)
def test_if_then_elif_else_statements(self):
if False:
result = "first value"
elif True:
result = "true value"
else:
result = "default value"
self.assertEqual("true value", result)
def test_while_statement(self):
i = 1
result = 1
while i <= 10:
result = result * i
i += 1
self.assertEqual(3628800, result)
def test_break_statement(self):
i = 1
result = 1
while True:
if i > 10:
break
result = result * i
i += 1
self.assertEqual(3628800, result)
def test_continue_statement(self):
i = 0
result = []
while i < 10:
i += 1
if (i % 2) == 0:
continue
result.append(i)
self.assertEqual([1, 3, 5, 7, 9], result)
def test_for_statement(self):
phrase = ["fish", "and", "chips"]
result = []
for item in phrase:
result.append(item.upper())
self.assertEqual(["FISH", "AND", "CHIPS"], result)
def test_for_statement_with_tuples(self):
round_table = [
("Lancelot", "Blue"),
("Galahad", "I don't know!"),
("Robin", "Blue! I mean Green!"),
("Arthur", "Is that an African Swallow or European Swallow?"),
]
result = []
for knight, answer in round_table:
result.append("Contestant: '" + knight + "' Answer: '" + answer + "'")
text = "Contestant: 'Robin' Answer: 'Blue! I mean Green!'"
self.assertRegex(result[2], text)
self.assertNotRegex(result[0], text)
self.assertNotRegex(result[1], text)
self.assertNotRegex(result[3], text)
| 27.566265 | 84 | 0.519231 |
793f059d3e5da7e5639ade951ba1b32ec0c3c082 | 2,179 | py | Python | bt_proximity/bt_rssi.py | dovidgef/bluetooth-proximity | 9f4838a734132eb667e09e17f42803297bdd799c | [
"Apache-2.0"
] | null | null | null | bt_proximity/bt_rssi.py | dovidgef/bluetooth-proximity | 9f4838a734132eb667e09e17f42803297bdd799c | [
"Apache-2.0"
] | null | null | null | bt_proximity/bt_rssi.py | dovidgef/bluetooth-proximity | 9f4838a734132eb667e09e17f42803297bdd799c | [
"Apache-2.0"
] | null | null | null | import bluetooth
import bluetooth._bluetooth as bt
import struct
import array
import fcntl
class BluetoothRSSI(object):
"""Object class for getting the RSSI value of a Bluetooth address."""
def __init__(self, addr):
self.addr = addr
self.hci_sock = bt.hci_open_dev()
self.hci_fd = self.hci_sock.fileno()
self.bt_sock = bluetooth.BluetoothSocket(bluetooth.L2CAP)
self.bt_sock.settimeout(10)
self.connected = False
self.cmd_pkt = None
def prep_cmd_pkt(self):
"""Prepare the command packet for requesting RSSI."""
reqstr = struct.pack(
b'6sB17s', bt.str2ba(self.addr), bt.ACL_LINK, b'\0' * 17)
request = array.array('b', reqstr)
handle = fcntl.ioctl(self.hci_fd, bt.HCIGETCONNINFO, request, 1)
handle = struct.unpack(b'8xH14x', request.tostring())[0]
self.cmd_pkt = struct.pack('H', handle)
def connect(self):
"""Connect to the Bluetooth device."""
# Connecting via PSM 1 - Service Discovery
self.bt_sock.connect_ex((self.addr, 1))
self.connected = True
def request_rssi(self):
"""Request the current RSSI value.
@return: The RSSI value or None if the device connection fails
(i.e. the device is not in range).
"""
try:
# Only do connection if not already connected
if not self.connected:
self.connect()
# Command packet prepared each iteration to allow disconnect to trigger IOError
self.prep_cmd_pkt()
# Send command to request RSSI
rssi = bt.hci_send_req(
self.hci_sock, bt.OGF_STATUS_PARAM,
bt.OCF_READ_RSSI, bt.EVT_CMD_COMPLETE, 4, self.cmd_pkt)
rssi = struct.unpack('b', rssi[3].to_bytes(1, 'big'))
return rssi
except IOError:
# Happens if connection fails (e.g. device is not in range)
self.connected = False
# Socket recreated to allow device to successfully reconnect
self.bt_sock = bluetooth.BluetoothSocket(bluetooth.L2CAP)
return None
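# Usage sketch (illustrative only; the address and polling interval below are
# placeholders, not part of the original module):
if __name__ == '__main__':
    import time
    btrssi = BluetoothRSSI(addr='AA:BB:CC:DD:EE:FF')  # hypothetical device address
    while True:
        # Poll the device's RSSI once per second
        print(btrssi.request_rssi())
        time.sleep(1)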
| 37.568966 | 91 | 0.613125 |
793f06480c102c12ea4247c7a67fbe85d6d4aef9 | 1,044 | py | Python | educaton/paginas_apps/paginas_turmas/urls.py | joaopedro02/assist_programing_education | 1ef0b4fbd9275e9cf74a607e31f35191cc07eeb3 | ["Apache-2.0"] | null | null | null | educaton/paginas_apps/paginas_turmas/urls.py | joaopedro02/assist_programing_education | 1ef0b4fbd9275e9cf74a607e31f35191cc07eeb3 | ["Apache-2.0"] | 14 | 2019-05-03T18:53:26.000Z | 2019-10-02T22:16:00.000Z | educaton/paginas_apps/paginas_turmas/urls.py | joaopedro02/assist_programing_education | 1ef0b4fbd9275e9cf74a607e31f35191cc07eeb3 | ["Apache-2.0"] | null | null | null |
"""educaton URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include,path
from . import views
app_name='paginas_turmas'
urlpatterns = [
path('',views.pagina_inicial.as_view(),name='pagina_inicial_turmas'),
path('<int:turma_id>/',views.turma.as_view(),name='pagina_turma_especifica'),
path('adicionar/',views.addturma,name='add_turma'),
path('criar/', views.cria_turma.as_view(),name='cria_turma'),
]
| 40.153846 | 81 | 0.720307 |
793f070275d9ea188547be7ef7caa1c43fa96379 | 224 | py | Python | Basic_Python/Test__all__01.py | ericzhai918/Leon.Zhai | 7a267473be27cbb21d597b036c4153fd888d4b20 | ["MIT"] | 1 | 2019-12-22T06:48:30.000Z | 2019-12-22T06:48:30.000Z | Basic_Python/Test__all__01.py | ericzhai918/Leon.Zhai | 7a267473be27cbb21d597b036c4153fd888d4b20 | ["MIT"] | null | null | null | Basic_Python/Test__all__01.py | ericzhai918/Leon.Zhai | 7a267473be27cbb21d597b036c4153fd888d4b20 | ["MIT"] | null | null | null |
'''A module for testing the __all__ variable'''
def hello():
print("Hello, Python")
def world():
print("Pyhton World is funny")
def test():
print('--test--')
# Define the __all__ variable, specifying that only the two members hello and world are imported by default
__all__ = ['hello', 'world']
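# e.g. "from Test__all__01 import *" now imports hello and world but not test;
# test can still be imported explicitly with "from Test__all__01 import test".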
| 17.230769 | 37 | 0.602679 |
793f0733c95469369a8197264805efd386daa85a | 22,905 | py | Python | ckan/lib/dictization/model_dictize.py | NeCTAR-RC/ckan | ba560b99c30097f888acb3726fa73bfd4a727c6b | ["Apache-2.0"] | 1 | 2015-02-26T03:23:11.000Z | 2015-02-26T03:23:11.000Z | ckan/lib/dictization/model_dictize.py | NeCTAR-RC/ckan | ba560b99c30097f888acb3726fa73bfd4a727c6b | ["Apache-2.0"] | null | null | null | ckan/lib/dictization/model_dictize.py | NeCTAR-RC/ckan | ba560b99c30097f888acb3726fa73bfd4a727c6b | ["Apache-2.0"] | null | null | null |
import datetime
import urlparse
from pylons import config
from sqlalchemy.sql import select
import ckan.logic as logic
import ckan.plugins as plugins
import ckan.lib.helpers as h
import ckan.lib.dictization as d
import ckan.new_authz as new_authz
import ckan.lib.search as search
## package save
def group_list_dictize(obj_list, context,
sort_key=lambda x:x['display_name'], reverse=False):
active = context.get('active', True)
with_private = context.get('include_private_packages', False)
query = search.PackageSearchQuery()
q = {'q': '+capacity:public' if not with_private else '*:*',
'fl': 'groups', 'facet.field': ['groups', 'owner_org'],
'facet.limit': -1, 'rows': 1}
query.run(q)
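    # The single faceted query above yields per-group package counts up front,
    # e.g. query.facets['groups'] might be {'my-group': 12} (hypothetical
    # values), instead of issuing one count query per group in the loop below.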
result_list = []
for obj in obj_list:
if context.get('with_capacity'):
obj, capacity = obj
group_dict = d.table_dictize(obj, context, capacity=capacity)
else:
group_dict = d.table_dictize(obj, context)
group_dict.pop('created')
if active and obj.state not in ('active', 'pending'):
continue
group_dict['display_name'] = obj.display_name
if obj.is_organization:
group_dict['packages'] = query.facets['owner_org'].get(obj.id, 0)
else:
group_dict['packages'] = query.facets['groups'].get(obj.name, 0)
if context.get('for_view'):
if group_dict['is_organization']:
plugin = plugins.IOrganizationController
else:
plugin = plugins.IGroupController
for item in plugins.PluginImplementations(plugin):
group_dict = item.before_view(group_dict)
result_list.append(group_dict)
return sorted(result_list, key=sort_key, reverse=reverse)
def resource_list_dictize(res_list, context):
active = context.get('active', True)
result_list = []
for res in res_list:
resource_dict = resource_dictize(res, context)
if active and res.state not in ('active', 'pending'):
continue
result_list.append(resource_dict)
return sorted(result_list, key=lambda x: x["position"])
def related_list_dictize(related_list, context):
result_list = []
for res in related_list:
related_dict = related_dictize(res, context)
result_list.append(related_dict)
return sorted(result_list, key=lambda x: x["created"], reverse=True)
def extras_dict_dictize(extras_dict, context):
result_list = []
for name, extra in extras_dict.iteritems():
dictized = d.table_dictize(extra, context)
if not extra.state == 'active':
continue
value = dictized["value"]
result_list.append(dictized)
return sorted(result_list, key=lambda x: x["key"])
def extras_list_dictize(extras_list, context):
result_list = []
active = context.get('active', True)
for extra in extras_list:
dictized = d.table_dictize(extra, context)
if active and extra.state not in ('active', 'pending'):
continue
value = dictized["value"]
result_list.append(dictized)
return sorted(result_list, key=lambda x: x["key"])
def _unified_resource_format(format_):
    ''' Convert resource formats into a more uniform set,
    e.g. .json, json, JSON, text/json are all converted to JSON.'''
format_clean = format_.lower().split('/')[-1].replace('.', '')
formats = {
'csv' : 'CSV',
'zip' : 'ZIP',
'pdf' : 'PDF',
'xls' : 'XLS',
'json' : 'JSON',
'kml' : 'KML',
'xml' : 'XML',
'shape' : 'SHAPE',
'rdf' : 'RDF',
'txt' : 'TXT',
'text' : 'TEXT',
'html' : 'HTML',
}
if format_clean in formats:
format_new = formats[format_clean]
else:
format_new = format_.lower()
return format_new
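# e.g. _unified_resource_format('.json'), _unified_resource_format('JSON') and
# _unified_resource_format('text/json') all return 'JSON'; formats not in the
# mapping are simply lower-cased.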
def resource_dictize(res, context):
resource = d.table_dictize(res, context)
extras = resource.pop("extras", None)
if extras:
resource.update(extras)
#tracking
if not context.get('for_edit'):
model = context['model']
tracking = model.TrackingSummary.get_for_resource(res.url)
resource['tracking_summary'] = tracking
resource['format'] = _unified_resource_format(res.format)
# some urls do not have the protocol this adds http:// to these
url = resource['url']
if not urlparse.urlsplit(url).scheme:
resource['url'] = u'http://' + url.lstrip('/')
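    # e.g. u'www.example.com/data.csv' becomes u'http://www.example.com/data.csv'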
return resource
def related_dictize(rel, context):
return d.table_dictize(rel, context)
def _execute_with_revision(q, rev_table, context):
'''
Takes an SqlAlchemy query (q) that is (at its base) a Select on an
object revision table (rev_table), and normally it filters to the
'current' object revision (latest which has been moderated) and
returns that.
But you can provide revision_id, revision_date or pending in the
context and it will filter to an earlier time or the latest unmoderated
object revision.
Raises NotFound if context['revision_id'] is provided, but the revision
ID does not exist.
Returns [] if there are no results.
'''
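    # For example (hypothetical values): {'revision_id': 'abc123'} filters to
    # the revisions current at that revision's timestamp, while
    # {'pending': True} keeps only rows whose expired_timestamp is the
    # 9999-12-31 sentinel, i.e. the latest unmoderated revisions.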
model = context['model']
meta = model.meta
session = model.Session
revision_id = context.get('revision_id')
revision_date = context.get('revision_date')
pending = context.get('pending')
if revision_id:
revision = session.query(context['model'].Revision).filter_by(
id=revision_id).first()
if not revision:
raise logic.NotFound
revision_date = revision.timestamp
if revision_date:
q = q.where(rev_table.c.revision_timestamp <= revision_date)
q = q.where(rev_table.c.expired_timestamp > revision_date)
elif pending:
q = q.where(rev_table.c.expired_timestamp == datetime.datetime(9999, 12, 31))
else:
q = q.where(rev_table.c.current == True)
return session.execute(q)
def package_dictize(pkg, context):
'''
Given a Package object, returns an equivalent dictionary.
Normally this is the current revision (most recent moderated version),
but you can provide revision_id, revision_date or pending in the
context and it will filter to an earlier time or the latest unmoderated
object revision.
May raise NotFound. TODO: understand what the specific set of
circumstances are that cause this.
'''
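    # For example (hypothetical values), calling package_dictize(pkg,
    # {'model': model, 'revision_date': datetime.datetime(2013, 1, 1)})
    # returns the package as it stood on that date rather than the current
    # revision.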
model = context['model']
#package
package_rev = model.package_revision_table
q = select([package_rev]).where(package_rev.c.id == pkg.id)
result = _execute_with_revision(q, package_rev, context).first()
if not result:
raise logic.NotFound
result_dict = d.table_dictize(result, context)
#resources
res_rev = model.resource_revision_table
resource_group = model.resource_group_table
q = select([res_rev], from_obj = res_rev.join(resource_group,
resource_group.c.id == res_rev.c.resource_group_id))
q = q.where(resource_group.c.package_id == pkg.id)
result = _execute_with_revision(q, res_rev, context)
result_dict["resources"] = resource_list_dictize(result, context)
result_dict['num_resources'] = len(result_dict.get('resources', []))
#tags
tag_rev = model.package_tag_revision_table
tag = model.tag_table
q = select([tag, tag_rev.c.state, tag_rev.c.revision_timestamp],
from_obj=tag_rev.join(tag, tag.c.id == tag_rev.c.tag_id)
).where(tag_rev.c.package_id == pkg.id)
result = _execute_with_revision(q, tag_rev, context)
result_dict["tags"] = d.obj_list_dictize(result, context, lambda x: x["name"])
result_dict['num_tags'] = len(result_dict.get('tags', []))
# Add display_names to tags. At first a tag's display_name is just the
# same as its name, but the display_name might get changed later (e.g.
# translated into another language by the multilingual extension).
for tag in result_dict['tags']:
assert not tag.has_key('display_name')
tag['display_name'] = tag['name']
#extras
extra_rev = model.extra_revision_table
q = select([extra_rev]).where(extra_rev.c.package_id == pkg.id)
result = _execute_with_revision(q, extra_rev, context)
result_dict["extras"] = extras_list_dictize(result, context)
#tracking
tracking = model.TrackingSummary.get_for_package(pkg.id)
result_dict['tracking_summary'] = tracking
#groups
member_rev = model.member_revision_table
group = model.group_table
q = select([group, member_rev.c.capacity],
from_obj=member_rev.join(group, group.c.id == member_rev.c.group_id)
).where(member_rev.c.table_id == pkg.id)\
.where(member_rev.c.state == 'active') \
.where(group.c.is_organization == False)
result = _execute_with_revision(q, member_rev, context)
result_dict["groups"] = d.obj_list_dictize(result, context)
#owning organization
group_rev = model.group_revision_table
q = select([group_rev]
).where(group_rev.c.id == pkg.owner_org) \
.where(group_rev.c.state == 'active')
result = _execute_with_revision(q, group_rev, context)
organizations = d.obj_list_dictize(result, context)
if organizations:
result_dict["organization"] = organizations[0]
else:
result_dict["organization"] = None
#relations
rel_rev = model.package_relationship_revision_table
q = select([rel_rev]).where(rel_rev.c.subject_package_id == pkg.id)
result = _execute_with_revision(q, rel_rev, context)
result_dict["relationships_as_subject"] = d.obj_list_dictize(result, context)
q = select([rel_rev]).where(rel_rev.c.object_package_id == pkg.id)
result = _execute_with_revision(q, rel_rev, context)
result_dict["relationships_as_object"] = d.obj_list_dictize(result, context)
# Extra properties from the domain object
# We need an actual Package object for this, not a PackageRevision
if isinstance(pkg, model.PackageRevision):
pkg = model.Package.get(pkg.id)
# isopen
result_dict['isopen'] = pkg.isopen if isinstance(pkg.isopen,bool) else pkg.isopen()
# type
# if null assign the default value to make searching easier
result_dict['type']= pkg.type or u'dataset'
# license
if pkg.license and pkg.license.url:
result_dict['license_url']= pkg.license.url
result_dict['license_title']= pkg.license.title.split('::')[-1]
elif pkg.license:
result_dict['license_title']= pkg.license.title
else:
result_dict['license_title']= pkg.license_id
# creation and modification date
result_dict['metadata_modified'] = pkg.metadata_modified.isoformat()
result_dict['metadata_created'] = pkg.metadata_created.isoformat() \
if pkg.metadata_created else None
return result_dict
def _get_members(context, group, member_type):
model = context['model']
Entity = getattr(model, member_type[:-1].capitalize())
q = model.Session.query(Entity, model.Member.capacity).\
join(model.Member, model.Member.table_id == Entity.id).\
filter(model.Member.group_id == group.id).\
filter(model.Member.state == 'active').\
filter(model.Member.table_name == member_type[:-1])
if member_type == 'packages':
q = q.filter(Entity.private==False)
if 'limits' in context and member_type in context['limits']:
return q[:context['limits'][member_type]]
return q.all()
def group_dictize(group, context):
model = context['model']
result_dict = d.table_dictize(group, context)
result_dict['display_name'] = group.display_name
result_dict['extras'] = extras_dict_dictize(
group._extras, context)
context['with_capacity'] = True
result_dict['packages'] = d.obj_list_dictize(
_get_members(context, group, 'packages'),
context)
query = search.PackageSearchQuery()
if group.is_organization:
q = {'q': 'owner_org:"%s" +capacity:public' % group.id, 'rows': 1}
else:
q = {'q': 'groups:"%s" +capacity:public' % group.name, 'rows': 1}
result_dict['package_count'] = query.run(q)['count']
result_dict['tags'] = tag_list_dictize(
_get_members(context, group, 'tags'),
context)
result_dict['groups'] = group_list_dictize(
_get_members(context, group, 'groups'),
context)
result_dict['users'] = user_list_dictize(
_get_members(context, group, 'users'),
context)
context['with_capacity'] = False
if context.get('for_view'):
if result_dict['is_organization']:
plugin = plugins.IOrganizationController
else:
plugin = plugins.IGroupController
for item in plugins.PluginImplementations(plugin):
result_dict = item.before_view(result_dict)
return result_dict
def tag_list_dictize(tag_list, context):
result_list = []
for tag in tag_list:
if context.get('with_capacity'):
tag, capacity = tag
dictized = d.table_dictize(tag, context, capacity=capacity)
else:
dictized = d.table_dictize(tag, context)
# Add display_names to tag dicts. At first a tag's display_name is just
# the same as its name, but the display_name might get changed later
# (e.g. translated into another language by the multilingual
# extension).
assert not dictized.has_key('display_name')
dictized['display_name'] = dictized['name']
if context.get('for_view'):
for item in plugins.PluginImplementations(
plugins.ITagController):
dictized = item.before_view(dictized)
result_list.append(dictized)
return result_list
def tag_dictize(tag, context):
tag_dict = d.table_dictize(tag, context)
query = search.PackageSearchQuery()
tag_query = u'+capacity:public '
vocab_id = tag_dict.get('vocabulary_id')
if vocab_id:
model = context['model']
vocab = model.Vocabulary.get(vocab_id)
tag_query += u'+vocab_{0}:"{1}"'.format(vocab.name, tag.name)
else:
tag_query += u'+tags:"{0}"'.format(tag.name)
q = {'q': tag_query, 'fl': 'data_dict', 'wt': 'json', 'rows': 1000}
package_dicts = [h.json.loads(result['data_dict'])
for result in query.run(q)['results']]
# Add display_names to tags. At first a tag's display_name is just the
# same as its name, but the display_name might get changed later (e.g.
# translated into another language by the multilingual extension).
assert 'display_name' not in tag_dict
tag_dict['display_name'] = tag_dict['name']
if context.get('for_view'):
for item in plugins.PluginImplementations(plugins.ITagController):
tag_dict = item.before_view(tag_dict)
tag_dict['packages'] = []
for package_dict in package_dicts:
for item in plugins.PluginImplementations(plugins.IPackageController):
package_dict = item.before_view(package_dict)
tag_dict['packages'].append(package_dict)
else:
tag_dict['packages'] = package_dicts
return tag_dict
def user_list_dictize(obj_list, context,
sort_key=lambda x:x['name'], reverse=False):
result_list = []
for obj in obj_list:
user_dict = user_dictize(obj, context)
result_list.append(user_dict)
return sorted(result_list, key=sort_key, reverse=reverse)
def member_dictize(member, context):
return d.table_dictize(member, context)
def user_dictize(user, context):
if context.get('with_capacity'):
user, capacity = user
result_dict = d.table_dictize(user, context, capacity=capacity)
else:
result_dict = d.table_dictize(user, context)
del result_dict['password']
result_dict['display_name'] = user.display_name
result_dict['email_hash'] = user.email_hash
result_dict['number_of_edits'] = user.number_of_edits()
result_dict['number_administered_packages'] = user.number_administered_packages()
requester = context.get('user')
if not (new_authz.is_sysadmin(requester) or
requester == user.name or
context.get('keep_sensitive_data', False)):
        # If not a sysadmin or the same user, strip sensitive info
result_dict.pop('apikey', None)
result_dict.pop('reset_key', None)
result_dict.pop('email', None)
model = context['model']
session = model.Session
if context.get('with_related'):
related_items = session.query(model.Related).\
filter(model.Related.owner_id==user.id).all()
result_dict['related_items'] = related_list_dictize(related_items,
context)
return result_dict
def task_status_dictize(task_status, context):
return d.table_dictize(task_status, context)
## conversion to api
def group_to_api(group, context):
api_version = context.get('api_version')
assert api_version, 'No api_version supplied in context'
dictized = group_dictize(group, context)
dictized["extras"] = dict((extra["key"], extra["value"])
for extra in dictized["extras"])
if api_version == 1:
dictized["packages"] = sorted([pkg["name"] for pkg in dictized["packages"]])
else:
dictized["packages"] = sorted([pkg["id"] for pkg in dictized["packages"]])
return dictized
def tag_to_api(tag, context):
api_version = context.get('api_version')
assert api_version, 'No api_version supplied in context'
dictized = tag_dictize(tag, context)
if api_version == 1:
return sorted([package["name"] for package in dictized["packages"]])
else:
return sorted([package["id"] for package in dictized["packages"]])
def resource_dict_to_api(res_dict, package_id, context):
res_dict.pop("revision_id")
res_dict.pop("state")
res_dict.pop("revision_timestamp")
res_dict["package_id"] = package_id
def package_to_api(pkg, context):
api_version = context.get('api_version')
assert api_version, 'No api_version supplied in context'
dictized = package_dictize(pkg, context)
dictized.pop("revision_timestamp")
dictized["tags"] = [tag["name"] for tag in dictized["tags"] \
if not tag.get('vocabulary_id')]
dictized["extras"] = dict((extra["key"], extra["value"])
for extra in dictized["extras"])
dictized['license'] = pkg.license.title if pkg.license else None
dictized['ratings_average'] = pkg.get_average_rating()
dictized['ratings_count'] = len(pkg.ratings)
dictized['notes_rendered'] = h.render_markdown(pkg.notes)
site_url = config.get('ckan.site_url', None)
if site_url:
dictized['ckan_url'] = '%s/dataset/%s' % (site_url, pkg.name)
for resource in dictized["resources"]:
resource_dict_to_api(resource, pkg.id, context)
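    # API v1 refers to packages by name, API v2 by id; pick the accessor once.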
def make_api_1(package_id):
return pkg.get(package_id).name
def make_api_2(package_id):
return package_id
if api_version == 1:
api_fn = make_api_1
dictized["groups"] = [group["name"] for group in dictized["groups"]]
# FIXME why is this just for version 1?
if pkg.resources:
dictized['download_url'] = pkg.resources[0].url
else:
api_fn = make_api_2
dictized["groups"] = [group["id"] for group in dictized["groups"]]
subjects = dictized.pop("relationships_as_subject")
objects = dictized.pop("relationships_as_object")
relationships = []
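    # Relationships where this package is the object are reported from the
    # other side, so the relationship type has to be reversed.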
for rel in objects:
model = context['model']
swap_types = model.PackageRelationship.forward_to_reverse_type
type = swap_types(rel['type'])
relationships.append({'subject': api_fn(rel['object_package_id']),
'type': type,
'object': api_fn(rel['subject_package_id']),
'comment': rel["comment"]})
for rel in subjects:
relationships.append({'subject': api_fn(rel['subject_package_id']),
'type': rel['type'],
'object': api_fn(rel['object_package_id']),
'comment': rel["comment"]})
dictized['relationships'] = relationships
return dictized
def vocabulary_dictize(vocabulary, context):
vocabulary_dict = d.table_dictize(vocabulary, context)
    assert 'tags' not in vocabulary_dict
vocabulary_dict['tags'] = [tag_dictize(tag, context) for tag
in vocabulary.tags]
return vocabulary_dict
def vocabulary_list_dictize(vocabulary_list, context):
return [vocabulary_dictize(vocabulary, context)
for vocabulary in vocabulary_list]
def activity_dictize(activity, context):
activity_dict = d.table_dictize(activity, context)
return activity_dict
def activity_list_dictize(activity_list, context):
return [activity_dictize(activity, context) for activity in activity_list]
def activity_detail_dictize(activity_detail, context):
return d.table_dictize(activity_detail, context)
def activity_detail_list_dictize(activity_detail_list, context):
return [activity_detail_dictize(activity_detail, context)
for activity_detail in activity_detail_list]
def package_to_api1(pkg, context):
    # DEPRECATED: set api_version in context and use package_to_api()
context['api_version'] = 1
return package_to_api(pkg, context)
def package_to_api2(pkg, context):
    # DEPRECATED: set api_version in context and use package_to_api()
context['api_version'] = 2
return package_to_api(pkg, context)
def group_to_api1(group, context):
    # DEPRECATED: set api_version in context and use group_to_api()
context['api_version'] = 1
return group_to_api(group, context)
def group_to_api2(group, context):
    # DEPRECATED: set api_version in context and use group_to_api()
context['api_version'] = 2
return group_to_api(group, context)
def tag_to_api1(tag, context):
    # DEPRECATED: set api_version in context and use tag_to_api()
context['api_version'] = 1
return tag_to_api(tag, context)
def tag_to_api2(tag, context):
    # DEPRECATED: set api_version in context and use tag_to_api()
context['api_version'] = 2
return tag_to_api(tag, context)
def user_following_user_dictize(follower, context):
return d.table_dictize(follower, context)
def user_following_dataset_dictize(follower, context):
return d.table_dictize(follower, context)
def user_following_group_dictize(follower, context):
return d.table_dictize(follower, context)
# ==== FILE: usr/src/tools/scripts/cddlchk.py (AsahiOS/gate, MIT) ====
#!@TOOLS_PYTHON@
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
# Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
# Copyright 2018 OmniOS Community Edition (OmniOSce) Association.
#
#
# Check for valid CDDL blocks in source files.
#
import sys, os, io, getopt, fnmatch
sys.path.insert(1, os.path.join(os.path.dirname(__file__), "..", "lib",
"python%d.%d" % sys.version_info[:2]))
# Allow running from the source tree, using the modules in the source tree
sys.path.insert(2, os.path.join(os.path.dirname(__file__), '..'))
from onbld.Checks.Cddl import cddlchk
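# An ExceptionList holds paths to skip: whole directories (lines ending in
# '/'), glob-style extensions (lines starting with '*.'), and exact file names.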
class ExceptionList(object):
def __init__(self):
self.dirs = []
self.files = []
self.extensions = []
def load(self, exfile):
fh = None
try:
fh = open(exfile, 'r')
except IOError as e:
sys.stderr.write('Failed to open exception list: '
'%s: %s\n' % (e.filename, e.strerror))
sys.exit(2)
for line in fh:
line = line.strip()
if line.strip().endswith('/'):
self.dirs.append(line[0:-1])
elif line.startswith('*.'):
self.extensions.append(line)
else:
self.files.append(line)
fh.close()
def match(self, filename):
if os.path.isdir(filename):
return filename in self.dirs
else:
if filename in self.files:
return True
for pat in self.extensions:
if fnmatch.fnmatch(filename, pat):
return True
def __contains__(self, elt):
return self.match(elt)
def usage():
progname = os.path.split(sys.argv[0])[1]
sys.stderr.write('''Usage: %s [-av] [-x exceptions] paths...
-a check that all the specified files have a CDDL block.
-v report on all files, not just those with errors.
-x exceptions load an exceptions file
''' % progname)
sys.exit(2)
def check(filename, opts):
try:
with io.open(filename, encoding='utf-8',
errors='replace') as fh:
return cddlchk(fh, verbose=opts['verbose'],
lenient=opts['lenient'],
output=sys.stdout)
except IOError as e:
sys.stderr.write("failed to open '%s': %s\n" %
(e.filename, e.strerror))
return 1
def walkpath(path, opts):
    if os.path.isdir(path):
        # os.path.walk() was removed in Python 3; os.walk() with in-place
        # pruning of excluded directories gives the same traversal.
        for dirpath, dirnames, filenames in os.walk(path):
            dirnames[:] = [d for d in dirnames
                           if os.path.join(dirpath, d) not in opts['exclude']]
            for f in filenames:
                fpath = os.path.join(dirpath, f)
                if fpath not in opts['exclude']:
                    opts['status'] |= check(fpath, opts)
    else:
        if path not in opts['exclude']:
            opts['status'] |= check(path, opts)
def main(args):
options = {
'status': 0,
'lenient': True,
'verbose': False,
'exclude': ExceptionList()
}
try:
opts, args = getopt.getopt(sys.argv[1:], 'avx:')
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt == '-a':
options['lenient'] = False
elif opt == '-v':
options['verbose'] = True
elif opt == '-x':
options['exclude'].load(arg)
for path in args:
walkpath(path, options)
return options['status']
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
# ==== FILE: examples/aspect_term_extraction/extract_aspects_chinese.py (XuMayi/DLCF-DCA, MIT) ====
# -*- coding: utf-8 -*-
# file: extract_aspects_chinese.py
# time: 2021/5/27 0027
# author: yangheng <yangheng@m.scnu.edu.cn>
# github: https://github.com/yangheng95
# Copyright (C) 2021. All Rights Reserved.
from pyabsa import ATEPCCheckpointManager, ABSADatasetList
# There might batch extraction function in the future
examples = ['尤 其 是 照 的 大 尺 寸 照 片 时 效 果 也 是 非 常 不 错 的',
'照 大 尺 寸 的 照 片 的 时 候 手 机 反 映 速 度 太 慢',
'关 键 的 时 候 需 要 表 现 持 续 影 像 的 短 片 功 能 还 是 很 有 用 的',
'相 比 较 原 系 列 锐 度 高 了 不 少 这 一 点 好 与 不 好 大 家 有 争 议',
'相比较原系列锐度高了不少这一点好与不好大家有争议',
'这款手机的大小真的很薄,但是颜色不太好看, 总体上我很满意啦。'
]
# Download the provided pre-trained model from Google Drive
aspect_extractor = ATEPCCheckpointManager.get_aspect_extractor(checkpoint='lcf_atepc_cdw_apcacc_96.37_apcf1_94.63_atef1_47.26')
# model_path = 'state_dict/lcf_atepc_cdw_apcacc_96.88_apcf1_96.35_atef1_91.2'
# If needed, customize the mapping from sentiment index to sentiment label as below; -999 is required padding, e.g.,
sentiment_map = {0: 'Bad', 1: 'Good', -999: ''}
examples = ABSADatasetList.MOOC
atepc_result = aspect_extractor.extract_aspect(inference_source=examples, # list-support only, for now
print_result=True, # print the result
pred_sentiment=True, # Predict the sentiment of extracted aspect terms
)
# ==== FILE: python/dgl/distributed/rpc_client.py (heming-zhang/dgl, Apache-2.0) ====
"""Functions used by client."""
import os
import socket
from . import rpc
from .constants import MAX_QUEUE_SIZE
if os.name != 'nt':
import fcntl
import struct
def local_ip4_addr_list():
"""Return a set of IPv4 address
"""
    assert os.name != 'nt', 'Windows RPC is not supported yet.'
nic = set()
for if_nidx in socket.if_nameindex():
name = if_nidx[1]
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ip_addr = socket.inet_ntoa(fcntl.ioctl(
sock.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', name[:15].encode("UTF-8")))[20:24])
nic.add(ip_addr)
return nic
def get_local_machine_id(server_namebook):
"""Given server_namebook, find local machine ID
Parameters
----------
server_namebook: dict
IP address namebook of server nodes, where key is the server's ID
        (starting from 0) and value is the server's machine_id, IP address,
        port, and group_count, e.g.,
          {0: [0, '172.31.40.143', 30050, 2],
           1: [0, '172.31.40.143', 30051, 2],
           2: [1, '172.31.36.140', 30050, 2],
           3: [1, '172.31.36.140', 30051, 2],
           4: [2, '172.31.47.147', 30050, 2],
           5: [2, '172.31.47.147', 30051, 2],
           6: [3, '172.31.30.180', 30050, 2],
           7: [3, '172.31.30.180', 30051, 2]}
Returns
-------
int
local machine ID
"""
res = 0
ip_list = local_ip4_addr_list()
for _, data in server_namebook.items():
machine_id = data[0]
ip_addr = data[1]
if ip_addr in ip_list:
res = machine_id
break
return res
def get_local_usable_addr():
"""Get local usable IP and port
Returns
-------
str
IP address, e.g., '192.168.8.12:50051'
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
# doesn't even have to be reachable
sock.connect(('10.255.255.255', 1))
ip_addr = sock.getsockname()[0]
    except OSError:
ip_addr = '127.0.0.1'
finally:
sock.close()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
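    # Bind to port 0 so the OS assigns a free ephemeral port.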
sock.bind(("", 0))
sock.listen(1)
port = sock.getsockname()[1]
sock.close()
return ip_addr + ':' + str(port)
def connect_to_server(ip_config, max_queue_size=MAX_QUEUE_SIZE, net_type='socket'):
"""Connect this client to server.
Parameters
----------
ip_config : str
Path of server IP configuration file.
max_queue_size : int
Maximal size (bytes) of client queue buffer (~20 GB on default).
Note that the 20 GB is just an upper-bound and DGL uses zero-copy and
it will not allocate 20GB memory at once.
net_type : str
Networking type. Current options are: 'socket'.
Raises
------
ConnectionError : If anything wrong with the connection.
"""
    assert max_queue_size > 0, 'queue_size (%d) must be a positive number.' % max_queue_size
    assert net_type in ('socket',), 'net_type (%s) can only be \'socket\'.' % net_type
# Register some basic service
rpc.register_service(rpc.CLIENT_REGISTER,
rpc.ClientRegisterRequest,
rpc.ClientRegisterResponse)
rpc.register_service(rpc.SHUT_DOWN_SERVER,
rpc.ShutDownRequest,
None)
server_namebook = rpc.read_ip_config(ip_config)
num_servers = len(server_namebook)
rpc.set_num_server(num_servers)
# group_count means how many servers
    # (main_server + backup_server) in total inside a machine.
group_count = []
max_machine_id = 0
for server_info in server_namebook.values():
group_count.append(server_info[3])
if server_info[0] > max_machine_id:
max_machine_id = server_info[0]
num_machines = max_machine_id+1
rpc.set_num_machines(num_machines)
machine_id = get_local_machine_id(server_namebook)
rpc.set_machine_id(machine_id)
rpc.create_sender(max_queue_size, net_type)
rpc.create_receiver(max_queue_size, net_type)
# Get connected with all server nodes
for server_id, addr in server_namebook.items():
server_ip = addr[1]
server_port = addr[2]
rpc.add_receiver_addr(server_ip, server_port, server_id)
rpc.sender_connect()
# Get local usable IP address and port
ip_addr = get_local_usable_addr()
client_ip, client_port = ip_addr.split(':')
# Register client on server
register_req = rpc.ClientRegisterRequest(ip_addr)
for server_id in range(num_servers):
rpc.send_request(server_id, register_req)
# wait server connect back
rpc.receiver_wait(client_ip, client_port, num_servers)
# recv client ID from server
res = rpc.recv_response()
rpc.set_rank(res.client_id)
print("Machine (%d) client (%d) connect to server successfuly!" \
% (machine_id, rpc.get_rank()))
def finalize_client():
"""Release resources of this client."""
rpc.finalize_sender()
rpc.finalize_receiver()
def shutdown_servers():
"""Issue commands to remote servers to shut them down.
Raises
------
ConnectionError : If anything wrong with the connection.
"""
if rpc.get_rank() == 0: # Only client_0 issue this command
req = rpc.ShutDownRequest(rpc.get_rank())
for server_id in range(rpc.get_num_server()):
rpc.send_request(server_id, req)
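# Hypothetical client lifecycle sketch (the ip_config path is assumed):
#   connect_to_server('ip_config.txt')
#   ...issue RPCs...
#   shutdown_servers()  # only rank 0 actually sends the shutdown request
#   finalize_client()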
# ==== FILE: src/ucis/cmd/__main__.py (furiosa-ai/pyucis, Apache-2.0) ====
'''
Created on Mar 24, 2020
@author: ballance
'''
import argparse
from ucis.ucis import UCIS
import os
def read_db(filename) -> UCIS:
    # os.path.splitext() keeps the leading dot, so strip it before comparing
    ext = os.path.splitext(filename)[1].lstrip('.')
    if ext == "xml":
        print("XML")
    elif ext == "scdb":
        print("SCDB")
else:
raise Exception("Unknown file extension")
return None
def report_cmd(args):
db = read_db(args.db)
pass
def get_parser():
parser = argparse.ArgumentParser()
subparser = parser.add_subparsers()
subparser.required = True
report = subparser.add_parser("report",
help="Produce a coverage report")
report.add_argument("--text", "-t",
help="Produce a coverage report in text format (default)")
report.add_argument("--xml", "-x",
help="Produce a coverage report in XML (Cobertura) format")
report.add_argument("--output", "-o",
help="Specify the output name. Default is report.[ext]")
report.add_argument("--detail", "-d",
help="Include bin details in coverage report")
report.add_argument("db",
help="Database to read")
report.set_defaults(func=report_cmd)
return parser
def main():
parser = get_parser()
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
    main()
# ==== FILE: asv_bench/benchmarks/multiindex_object.py (garyteofanus/pandas, BSD-3-Clause and others) ====
import string
import numpy as np
from pandas import DataFrame, MultiIndex, RangeIndex, date_range
from .pandas_vb_common import tm
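# asv conventions: methods named time_* are timed by airspeed velocity, while
# setup()/setup_cache() run outside the measured region.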
class GetLoc:
def setup(self):
self.mi_large = MultiIndex.from_product(
[np.arange(1000), np.arange(20), list(string.ascii_letters)],
names=["one", "two", "three"],
)
self.mi_med = MultiIndex.from_product(
[np.arange(1000), np.arange(10), list("A")], names=["one", "two", "three"]
)
self.mi_small = MultiIndex.from_product(
[np.arange(100), list("A"), list("A")], names=["one", "two", "three"]
)
def time_large_get_loc(self):
self.mi_large.get_loc((999, 19, "Z"))
def time_large_get_loc_warm(self):
for _ in range(1000):
self.mi_large.get_loc((999, 19, "Z"))
def time_med_get_loc(self):
self.mi_med.get_loc((999, 9, "A"))
def time_med_get_loc_warm(self):
for _ in range(1000):
self.mi_med.get_loc((999, 9, "A"))
def time_string_get_loc(self):
self.mi_small.get_loc((99, "A", "A"))
def time_small_get_loc_warm(self):
for _ in range(1000):
self.mi_small.get_loc((99, "A", "A"))
class Duplicates:
def setup(self):
size = 65536
arrays = [np.random.randint(0, 8192, size), np.random.randint(0, 1024, size)]
mask = np.random.rand(size) < 0.1
self.mi_unused_levels = MultiIndex.from_arrays(arrays)
self.mi_unused_levels = self.mi_unused_levels[mask]
def time_remove_unused_levels(self):
self.mi_unused_levels.remove_unused_levels()
class Integer:
def setup(self):
self.mi_int = MultiIndex.from_product(
[np.arange(1000), np.arange(1000)], names=["one", "two"]
)
self.obj_index = np.array(
[
(0, 10),
(0, 11),
(0, 12),
(0, 13),
(0, 14),
(0, 15),
(0, 16),
(0, 17),
(0, 18),
(0, 19),
],
dtype=object,
)
def time_get_indexer(self):
self.mi_int.get_indexer(self.obj_index)
def time_is_monotonic(self):
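        # The attribute access itself is the benchmark: it computes (and
        # caches) the monotonicity flag.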
self.mi_int.is_monotonic
class Duplicated:
def setup(self):
n, k = 200, 5000
levels = [np.arange(n), tm.makeStringIndex(n).values, 1000 + np.arange(n)]
codes = [np.random.choice(n, (k * n)) for lev in levels]
self.mi = MultiIndex(levels=levels, codes=codes)
def time_duplicated(self):
self.mi.duplicated()
class Sortlevel:
def setup(self):
n = 1182720
low, high = -4096, 4096
arrs = [
np.repeat(np.random.randint(low, high, (n // k)), k)
for k in [11, 7, 5, 3, 1]
]
self.mi_int = MultiIndex.from_arrays(arrs)[np.random.permutation(n)]
a = np.repeat(np.arange(100), 1000)
b = np.tile(np.arange(1000), 100)
self.mi = MultiIndex.from_arrays([a, b])
self.mi = self.mi.take(np.random.permutation(np.arange(100000)))
def time_sortlevel_int64(self):
self.mi_int.sortlevel()
def time_sortlevel_zero(self):
self.mi.sortlevel(0)
def time_sortlevel_one(self):
self.mi.sortlevel(1)
class Values:
def setup_cache(self):
level1 = range(1000)
level2 = date_range(start="1/1/2012", periods=100)
mi = MultiIndex.from_product([level1, level2])
return mi
def time_datetime_level_values_copy(self, mi):
mi.copy().values
def time_datetime_level_values_sliced(self, mi):
mi[:10].values
class CategoricalLevel:
def setup(self):
self.df = DataFrame(
{
"a": np.arange(1_000_000, dtype=np.int32),
"b": np.arange(1_000_000, dtype=np.int64),
"c": np.arange(1_000_000, dtype=float),
}
).astype({"a": "category", "b": "category"})
def time_categorical_level(self):
self.df.set_index(["a", "b"])
class Equals:
def setup(self):
idx_large_fast = RangeIndex(100000)
idx_small_slow = date_range(start="1/1/2012", periods=1)
self.mi_large_slow = MultiIndex.from_product([idx_large_fast, idx_small_slow])
self.idx_non_object = RangeIndex(1)
def time_equals_non_object_index(self):
self.mi_large_slow.equals(self.idx_non_object)
class SetOperations:
params = [
("monotonic", "non_monotonic"),
("datetime", "int", "string"),
("intersection", "union", "symmetric_difference"),
]
param_names = ["index_structure", "dtype", "method"]
def setup(self, index_structure, dtype, method):
N = 10 ** 5
level1 = range(1000)
level2 = date_range(start="1/1/2000", periods=N // 1000)
dates_left = MultiIndex.from_product([level1, level2])
level2 = range(N // 1000)
int_left = MultiIndex.from_product([level1, level2])
level2 = tm.makeStringIndex(N // 1000).values
str_left = MultiIndex.from_product([level1, level2])
data = {
"datetime": dates_left,
"int": int_left,
"string": str_left,
}
if index_structure == "non_monotonic":
data = {k: mi[::-1] for k, mi in data.items()}
data = {k: {"left": mi, "right": mi[:-1]} for k, mi in data.items()}
self.left = data[dtype]["left"]
self.right = data[dtype]["right"]
def time_operation(self, index_structure, dtype, method):
getattr(self.left, method)(self.right)
from .pandas_vb_common import setup # noqa: F401 isort:skip
# ==== FILE: detect_secrets/core/upgrades/v0_12.py (paulo-sampaio/detect-secrets, Apache-2.0) ====
from typing import Any
from typing import Dict
def upgrade(baseline: Dict[str, Any]) -> None:
if 'exclude_regex' in baseline:
baseline['exclude'] = {
'files': baseline.pop('exclude_regex'),
'lines': None,
}
baseline['word_list'] = {
'file': None,
'hash': None,
}
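# Minimal sketch of the transformation on a hypothetical baseline dict:
#   baseline = {'exclude_regex': 'tests/.*'}
#   upgrade(baseline)
#   # baseline['exclude'] == {'files': 'tests/.*', 'lines': None}
#   # baseline['word_list'] == {'file': None, 'hash': None}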
# ==== FILE: hatter_orm/fields.py (ndrwpvlv/hatter_orm, MIT) ====
class Field:
def __init__(self):
self.name = None
def get_type(self):
return self.name.upper()
class IntegerField(Field):
def __init__(self):
super(IntegerField, self).__init__()
self.name = 'Integer'
class RealField(Field):
def __init__(self):
super(RealField, self).__init__()
self.name = 'Real'
class TextField(Field):
def __init__(self):
super(TextField, self).__init__()
self.name = 'Text'
class BlobField(Field):
def __init__(self):
super(BlobField, self).__init__()
self.name = 'Blob'
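# Hypothetical usage sketch:
#   IntegerField().get_type()  # -> 'INTEGER'
#   TextField().get_type()     # -> 'TEXT'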
# ==== FILE: org/cloudbsd/common/utils/utils.py (cloudbsdorg/cloudbsd-tools-python3, BSD-3-Clause) ====
class Utils:
def __init__(self):
print(f'Hi')