hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 11
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
251
| max_stars_repo_name
stringlengths 4
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
251
| max_issues_repo_name
stringlengths 4
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
251
| max_forks_repo_name
stringlengths 4
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.05M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.04M
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fff3557fd7e005babefb16e3b6b117ef8a3354ec | 918 | py | Python | file_automation.py | FlightDev/YSPA | 5226712ebf305e7a3c686c43c996517a617f748b | [
"MIT"
] | null | null | null | file_automation.py | FlightDev/YSPA | 5226712ebf305e7a3c686c43c996517a617f748b | [
"MIT"
] | null | null | null | file_automation.py | FlightDev/YSPA | 5226712ebf305e7a3c686c43c996517a617f748b | [
"MIT"
] | null | null | null | import os
import glob
from astropy.io import fits
# Scans a night's directory of FITS images, reads each frame's DATE-OBS
# header, and prints the file names ordered by observation time-of-day.
# NOTE: Python 2 syntax (print statements) -- run under a Python 2 interpreter.
#/home/student/Desktop/Images/iTelescope/20180716-California-T24-GOOD
# Yo Neal. When you use this program, you have to change a few things between iTelescope and LFOP
# FIRST, remember to change the file path or you'll be a dummy. Also for LFOP -13 and -12 while
# for iTelescope it should be -9 and -8. Hopefully you know what to do with those numbers...
#/home/student/Desktop/Images/LFOP
dir = '20180726-LFOP-GOOD'  # observation-night folder name (shadows builtin ``dir``)
path = '/home/student/Desktop/Images/LFOP/' + dir + '/'
dict = {}  # maps time-of-day suffix of DATE-OBS -> filename (shadows builtin ``dict``)
date = ""  # date prefix of the most recently read DATE-OBS value
for filename in os.listdir(path):
    if filename.endswith(".fit"):
        file = path + str(filename)
        image = fits.open(file)
        # DATE-OBS is an ISO-style timestamp; the fixed slice offsets split it
        # into a date prefix and a time-of-day suffix.  Per the note above the
        # offsets differ between LFOP (-13/-12) and iTelescope (-9/-8) files.
        s = image[0].header.get("DATE-OBS")
        date = s[:len(s) - 13]
        dict.update({s[len(s) - 12:]: filename})
# Emit the files sorted by observation time, then the date and the count.
for key, value in sorted(dict.items()):
    print value + "\t\t" + str(key)
print date
print len(dict)
| 32.785714 | 99 | 0.667756 |
fff3dd07c2f6cdec73bcd25788a20c7594c2652d | 959 | py | Python | streamlit/main.py | prakhar134/clean-or-messy | 0b9080363c48ca9cff0449875dfcbd169ef64321 | [
"MIT"
] | 13 | 2020-10-08T13:52:21.000Z | 2022-03-11T07:02:35.000Z | streamlit/main.py | architsharmaa/clean-or-messy | b40028cb4c4c8bbefb91a4b016096953b445c146 | [
"MIT"
] | null | null | null | streamlit/main.py | architsharmaa/clean-or-messy | b40028cb4c4c8bbefb91a4b016096953b445c146 | [
"MIT"
] | 9 | 2020-10-08T12:02:50.000Z | 2022-01-25T23:38:46.000Z | from fastai.vision.all import *
from PIL import Image
import streamlit as st
import numpy as np
from io import BytesIO
from .config import imgWidth, imgHeight

# Minimal Streamlit front-end: upload a room photo and classify it as
# "clean" vs "messy" with a pre-trained fastai learner.
st.title("CleanvsMessy")
st.markdown('''
## Upload the image''',True)
# Silence Streamlit's file-uploader encoding deprecation warning.
st.set_option('deprecation.showfileUploaderEncoding', False)
file = st.file_uploader(" ")
# ``load_learner`` comes from the ``fastai.vision.all`` star import above.
model = load_learner('model/model_v0.pkl')
st.markdown('''
## Preview of the Image''',True)
if file != None:
    st.image(file, width = imgWidth, height = imgHeight)
if file != None:
    # NOTE(review): ``upload`` is not defined in this file as shown here --
    # presumably a helper that runs ``model`` on the image and returns a dict
    # with ``is_image_clean`` / ``predictedVal`` keys; confirm upstream.
    result = upload(file)
    st.write("Is Image Clean? "+result["is_image_clean"])
st.write("Confidence "+str(result["predictedVal"])) | 30.935484 | 65 | 0.683003 |
fff46233cd9fc6a4821a3755e7bb2b8fd09e058e | 1,030 | py | Python | read_trials.py | Volkarl/P10-ExoskeletonTransferLearning | 311daf3791c65838ff9c496eeb6526b096b41d4c | [
"MIT"
] | null | null | null | read_trials.py | Volkarl/P10-ExoskeletonTransferLearning | 311daf3791c65838ff9c496eeb6526b096b41d4c | [
"MIT"
] | 2 | 2020-11-13T18:39:27.000Z | 2021-08-25T15:59:36.000Z | read_trials.py | Volkarl/P10-ExoskeletonTransferLearning | 311daf3791c65838ff9c496eeb6526b096b41d4c | [
"MIT"
] | null | null | null | import pickle
import matplotlib.pyplot as plt
import pandas as pd

# Load the pickled hyperopt ``Trials`` object.  A ``with`` block closes the
# file handle deterministically -- the original ``pickle.load(open(...))``
# leaked it.
with open("trials.p", "rb") as trials_file:
    trials = pickle.load(trials_file)

print("Set breakpoint here")

# Exploratory plotting left by the original author (intentionally disabled):
#for item in trials.trials:
#    args = item["vals"]
#    res = item["result"]["loss"]
#itemtuples = [(item["misc"]["vals"]["dilation_group"], item["misc"]["vals"]["use_ref_points"], item["result"]["loss"]) for item in trials.trials]
#(dil, ref, loss) = zip(*itemtuples)
#plt.figure()
#plt.plot(dil, 'ro')
#plt.title('Use_dilation (1 is true, 0 is false)')
#plt.plot(loss)
#plt.plot(pd.DataFrame(loss).ewm(span=1).mean())
#plt.title('MAE')
#plt.plot(ref, 'g^')
#plt.legend()
#plt.show()

print("Set breakpoint here")
print("PRINT BEST TRIALS")

# Collect (loss, hyperparameter-values) pairs for every successful trial and
# print the ten best (lowest-loss) ones.
results = [
    (trial["result"]["loss"], str(trial["misc"]["vals"]))
    for trial in trials.trials
    if trial["result"]["status"] == "ok"
]
results.sort(key=lambda pair: pair[0])
for best in results[:10]:
    print("--------------------------\n")
    print(best)
    print("\n\n")
# If you want to print training times use attemptid["book_time"]
| 24.52381 | 146 | 0.635922 |
fff5f55a4eee57bae636a577f32adbde97ba453e | 3,151 | py | Python | e3/provisioning/AtlassianAwsSecurity.py | sguillory6/e3 | 1505e6ea389157b9645155b9da13d6d316235f1a | [
"Apache-2.0"
] | null | null | null | e3/provisioning/AtlassianAwsSecurity.py | sguillory6/e3 | 1505e6ea389157b9645155b9da13d6d316235f1a | [
"Apache-2.0"
] | null | null | null | e3/provisioning/AtlassianAwsSecurity.py | sguillory6/e3 | 1505e6ea389157b9645155b9da13d6d316235f1a | [
"Apache-2.0"
] | null | null | null | import logging
import logging.config
import os
import subprocess
from datetime import datetime, timedelta
from botocore.credentials import CredentialProvider, RefreshableCredentials
from dateutil.tz import tzlocal
from common.E3 import e3
| 39.886076 | 114 | 0.614408 |
fff5fae09ca4ba6758cfde4e7471355a0e7af098 | 3,506 | py | Python | RecRoomAnimatedProfilePicture.py | zigzatuzoo/Rec-Room-Animated-Profile-Picture | b8eeabf478613f47d3bdb9195ad2f5051e7aaaad | [
"Apache-2.0"
] | 4 | 2021-08-17T01:13:03.000Z | 2022-03-19T04:03:01.000Z | RecRoomAnimatedProfilePicture.py | zigzatuzoo/Rec-Room-Animated-Profile-Picture | b8eeabf478613f47d3bdb9195ad2f5051e7aaaad | [
"Apache-2.0"
] | null | null | null | RecRoomAnimatedProfilePicture.py | zigzatuzoo/Rec-Room-Animated-Profile-Picture | b8eeabf478613f47d3bdb9195ad2f5051e7aaaad | [
"Apache-2.0"
] | null | null | null | ''' Stuff you need to update for this to work '''
'Enter your username here'
user = ''
'Enter your password here'
passwd = ''
# Rec Room image names for the three animation frames.  The loop below plays
# them in the order 1, 2, 3, 2 so the animation runs back and forth.
image1 = '2d83af05944d49c69fa9565fb238a91b.jpg'
image2 = '49b2788b672e4088a25eb0a9eff35c17.jpg'
image3 = '355c2c7e87f0489bb5f0308cdec108f6.jpg'
" ^ You need to change EACH of these to whatever you want the 3 pics to be (Currently set to a waving red zigzag)"
''' Stuff that will change how the program works '''
speed = 0.2
"^ As you can probably guess, this changes how long the PFP stays on each image"
import time
# Import third-party dependencies, printing install instructions when missing.
# NOTE(review): the bare ``except`` clauses only print a message -- the script
# will still crash further down if the packages really are absent.
try:
    import requests
except:
    print('''You do not have the requests library installed, you need to install it via the following command:
pip install requests
Thank you!''')
try:
    import recnetlogin as rnl
except:
    print('''You do not have the RecNetLogin package installed, you need to install it via the following command:
python -m pip install git+https://github.com/Jegarde/RecNet-Login.git#egg=recnetlogin
Thank you!''')
''' Just Initializing some values '''
login = rnl.login_to_recnet(username=user,password=passwd)
x = 0        # frame counter, incremented before each frame switch below
BToken = ''  # bearer token presented to the rec.net accounts API
''' Making the strings into the format read by the rec.net image api '''
imageName1 = 'imageName=' + image1
imageName2 = 'imageName=' + image2
imageName3 = 'imageName=' + image3
''' Initial token request '''
BToken = login.access_token
print(BToken)
''' The loop program that actually makes the picure move '''
# Endless animation loop: cycle the profile picture and refresh the token
# once the API starts rejecting it with HTTP 401.
while 1 == 1:
    ''' The HTTP header for changing your In-Game pfp '''
    # Headers mimic a browser request from https://rec.net; rebuilt every
    # iteration so a refreshed ``BToken`` is picked up.
    Headers = {'sec-ch-ua':'";Not A Brand";v="99", "Chromium";v="88"',
        'Accept' : '*/*',
        'sec-ch-ua-mobile' : '?0',
        'Authorization' : BToken,
        'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8',
        'Origin' : 'https://rec.net',
        'Sec-Fetch-Site' : 'same-site',
        'Sec-Fetch-Mode' : 'cors',
        'Sec-Fetch-Dest' : 'empty',
        'Referer' : 'https://rec.net/',
        'Accept-Encoding' : 'gzip, deflate',
        'Accept-Language' : 'en-US,en;q=0.9',
        }
    ''' The easy way to edit what pfp plays after what '''
    ''' In this default format, it will show image 1 first, then image 2, then image 3, then image 2 again and will LOOP this. The x value in the function calls is to make the counter function, if you don't add it to your function calls or you delete them, THE COUNTER WILL NOT WORK. '''
    # NOTE(review): ``i1``/``i2``/``i3`` are not defined in the content shown
    # here -- presumably helpers that PUT imageName1/2/3 and sleep ``speed``
    # seconds; confirm against the full file.
    x = x + 1
    i1()
    x = x + 1
    i2()
    x = x + 1
    i3()
    x = x + 1
    i2()
    ''' Requests a new auth token when that one is no longer valid '''
    r = requests.put('https://accounts.rec.net/account/me/profileImage', headers = Headers)
    if r.status_code == 401:
        print('Invalid Token')
        login = rnl.login_to_recnet(username=user,password=passwd)
        BToken = login.access_token
        print(BToken)
| 35.77551 | 287 | 0.634341 |
fff7d77cd5951c966e8c3d645997399fd6e953c2 | 14,121 | py | Python | rcommander/src/rcommander/graph_view.py | rummanwaqar/rcommander-core | 7106d5868db76c47dea6ad11118a54351a8bd390 | [
"BSD-3-Clause"
] | 4 | 2015-04-08T09:57:43.000Z | 2021-08-12T01:44:37.000Z | rcommander/src/rcommander/graph_view.py | jhu-lcsr-forks/rcommander-core | 1a0350e9b93687eff6a4407f72b5250be5f56919 | [
"BSD-3-Clause"
] | 1 | 2015-03-12T09:10:27.000Z | 2015-03-12T09:10:27.000Z | rcommander/src/rcommander/graph_view.py | jhu-lcsr-forks/rcommander-core | 1a0350e9b93687eff6a4407f72b5250be5f56919 | [
"BSD-3-Clause"
] | 3 | 2015-03-12T10:59:17.000Z | 2021-06-21T02:13:57.000Z | #import roslib; roslib.load_manifest('rcommander_core')
import graph.style as gs
import graph
import graph.layout as gl
import tool_utils as tu
import graph_model as gm
import numpy as np
import time
import copy
| 39.116343 | 146 | 0.578217 |
fff91c879216ac70a7559f58214c7d1b3892a9ea | 3,264 | py | Python | django_input_collection/api/restframework/collection.py | pivotal-energy-solutions/django-input-collection | cc2ce3e0a7104ba9c524eaba5706da94ddb04a5f | [
"Apache-2.0"
] | null | null | null | django_input_collection/api/restframework/collection.py | pivotal-energy-solutions/django-input-collection | cc2ce3e0a7104ba9c524eaba5706da94ddb04a5f | [
"Apache-2.0"
] | 4 | 2019-08-25T15:47:24.000Z | 2022-03-24T19:35:09.000Z | django_input_collection/api/restframework/collection.py | pivotal-energy-solutions/django-input-collection | cc2ce3e0a7104ba9c524eaba5706da94ddb04a5f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from django.urls import reverse
from rest_framework.response import Response
from rest_framework import status
from ...collection import BaseAPICollector, BaseAPISpecification
from ... import models
from . import serializers
| 38.857143 | 99 | 0.662377 |
fffc90bcd5aabe8c07f5b2517e1c835715addf0e | 770 | py | Python | DFS/depth_first_search.py | Quanta-Algorithm-Design/graphs | 3a5b6362bf60a1e2fb06d2fadab46e72124d637d | [
"MIT"
] | null | null | null | DFS/depth_first_search.py | Quanta-Algorithm-Design/graphs | 3a5b6362bf60a1e2fb06d2fadab46e72124d637d | [
"MIT"
] | null | null | null | DFS/depth_first_search.py | Quanta-Algorithm-Design/graphs | 3a5b6362bf60a1e2fb06d2fadab46e72124d637d | [
"MIT"
] | 1 | 2020-10-05T06:46:13.000Z | 2020-10-05T06:46:13.000Z | #!/usr/bin/env python3
"""
This module defines functions for depth-first-search in a graph with a given adjacency list
"""
def dfs_visit(node_list, adj_list, root_node, parent):
    """Recursively discover every node reachable from ``root_node``.

    ``parent`` is mutated in place: each newly discovered node is mapped to
    the node it was reached from.  Nodes already present in ``parent`` are
    treated as visited and skipped.  ``node_list`` is unused here; it is kept
    so the signature mirrors ``dfs``.
    """
    for neighbour in adj_list[root_node]:
        if neighbour in parent:
            continue  # already discovered via some other path
        parent[neighbour] = root_node
        dfs_visit(node_list, adj_list, neighbour, parent)
def dfs(node_list, adj_list):
    """Run depth-first search over the whole (possibly disconnected) graph.

    Every node in ``node_list`` is tried as a root, so components that are
    unreachable from earlier roots are still explored.

    Returns:
        dict: maps each node to its DFS-tree parent; each component root maps
        to ``None``.  The original built this dict and discarded it, which
        made the function's result inaccessible to callers; returning it is
        backward compatible (previous callers ignored the ``None`` return).
    """
    parent = {}
    for root_node in node_list:
        if root_node not in parent:
            parent[root_node] = None  # root of a new DFS tree
            dfs_visit(node_list, adj_list, root_node, parent)
    return parent
ffff1e4cd8bc9bad42ca402b2c639f4b45a16abe | 791 | py | Python | pirates/quest/QuestHolderBase.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | 3 | 2021-02-25T06:38:13.000Z | 2022-03-22T07:00:15.000Z | pirates/quest/QuestHolderBase.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | null | null | null | pirates/quest/QuestHolderBase.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | 1 | 2021-02-25T06:38:17.000Z | 2021-02-25T06:38:17.000Z | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.quest.QuestHolderBase
| 30.423077 | 104 | 0.694058 |
0803020bd1e3c35bd9b149aea49e7ac12f9623a3 | 933 | py | Python | setup.py | yihong0618/-nbnhhsh-cli | 3c8241dbc772b4b693e06b350c4351e75572596a | [
"Apache-2.0"
] | 33 | 2021-07-09T05:40:00.000Z | 2022-02-07T12:49:34.000Z | setup.py | yihong0618/-nbnhhsh-cli | 3c8241dbc772b4b693e06b350c4351e75572596a | [
"Apache-2.0"
] | 1 | 2021-07-09T05:37:02.000Z | 2021-07-09T05:37:02.000Z | setup.py | yihong0618/-nbnhhsh-cli | 3c8241dbc772b4b693e06b350c4351e75572596a | [
"Apache-2.0"
] | 2 | 2021-07-10T10:25:08.000Z | 2021-07-11T03:16:38.000Z | from setuptools import setup, find_packages
VERSION = "0.1.1"
setup(
name="hhsh",
version=VERSION,
description=" cli",
long_description=" cli",
keywords="python hhsh cli terminal",
author="itorr,yihong0618",
author_email="zouzou0208@gmail.com",
url="https://github.com/yihong0618/hhsh",
packages=find_packages(),
include_package_data=True,
zip_safe=True,
install_requires=["requests", "rich"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Software Development :: Libraries",
],
entry_points={
"console_scripts": ["hhsh = hhsh.hhsh:main"],
},
)
| 29.15625 | 53 | 0.621651 |
08058658e2bf102d2ac28a2a02f1701e1eb02d65 | 937 | py | Python | container/base/src/cache.py | hmrc/devops-tooling-build | 03d62df3a45d5dcce306cd6cad6c95a24a4b34ab | [
"Apache-2.0"
] | 1 | 2021-11-10T16:09:43.000Z | 2021-11-10T16:09:43.000Z | container/base/src/cache.py | hmrc/devops-tooling-build | 03d62df3a45d5dcce306cd6cad6c95a24a4b34ab | [
"Apache-2.0"
] | 6 | 2021-07-02T14:15:25.000Z | 2022-02-03T12:57:36.000Z | container/base/src/cache.py | hmrc/devops-tooling-build | 03d62df3a45d5dcce306cd6cad6c95a24a4b34ab | [
"Apache-2.0"
] | null | null | null | import datetime
import hashlib
import os
import pathlib
from typing import Optional
import yaml
| 25.324324 | 78 | 0.649947 |
08075a784b23b26531f0e2fcf4a1653e8cbbe078 | 1,118 | py | Python | tests/test_blender.py | dumpmemory/lassl | dfe56f09cc2ade6c777ad8561b24f23d83a34188 | [
"Apache-2.0"
] | null | null | null | tests/test_blender.py | dumpmemory/lassl | dfe56f09cc2ade6c777ad8561b24f23d83a34188 | [
"Apache-2.0"
] | null | null | null | tests/test_blender.py | dumpmemory/lassl | dfe56f09cc2ade6c777ad8561b24f23d83a34188 | [
"Apache-2.0"
] | null | null | null | from collections import Counter
import pytest
from datasets import load_dataset
from lassl.blender import DatasetBlender
| 30.216216 | 82 | 0.620751 |
080796109f90dd5533115b48ae3a4657f5fb4224 | 4,542 | py | Python | wisps/data_analysis/path_parser.py | caganze/WISPS | 81b91f8b49c7345ab68b7c4eb480716985e8905c | [
"MIT"
] | null | null | null | wisps/data_analysis/path_parser.py | caganze/WISPS | 81b91f8b49c7345ab68b7c4eb480716985e8905c | [
"MIT"
] | 7 | 2021-02-02T21:51:56.000Z | 2022-01-13T00:57:45.000Z | wisps/data_analysis/path_parser.py | caganze/wisps | 6572201f94a6af6d1c0a306f2f447215d4330bd7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
After the introduction of version 6.2, all wisp data and hst-3d are now on MAST
3D-HST has not added any new data nor changed their directory structure,
but that's not the case for WISP
Aim: parse new directories to make them compatible with v5.0
"""
import os
import glob
from ..utils import memoize_func
REMOTE_FOLDER=os.environ['WISP_SURVEY_DATA']
| 34.409091 | 170 | 0.647952 |
08092e15e7923e75bbc9274300846c3ee3fbd2d9 | 158 | py | Python | tests/utils/TestTransaction.py | Shaid3r/reservations | 43e17ae88eed74593879f9f8c5a9bed7252888f7 | [
"MIT"
] | null | null | null | tests/utils/TestTransaction.py | Shaid3r/reservations | 43e17ae88eed74593879f9f8c5a9bed7252888f7 | [
"MIT"
] | null | null | null | tests/utils/TestTransaction.py | Shaid3r/reservations | 43e17ae88eed74593879f9f8c5a9bed7252888f7 | [
"MIT"
] | null | null | null | import storage
import pytest
| 15.8 | 33 | 0.683544 |
080b1f9b578c418d65d4a8c4119d27d86ab70fa5 | 2,451 | py | Python | aldryn_redirects/migrations/0003_auto_20171206_1150.py | compoundpartners/aldryn-redirects | ed1b1e90a7774a4bead771e158e30d5846e64e60 | [
"BSD-3-Clause"
] | 1 | 2020-05-14T06:41:50.000Z | 2020-05-14T06:41:50.000Z | aldryn_redirects/migrations/0003_auto_20171206_1150.py | compoundpartners/aldryn-redirects | ed1b1e90a7774a4bead771e158e30d5846e64e60 | [
"BSD-3-Clause"
] | 11 | 2016-01-11T11:42:58.000Z | 2018-11-05T16:13:27.000Z | aldryn_redirects/migrations/0003_auto_20171206_1150.py | compoundpartners/aldryn-redirects | ed1b1e90a7774a4bead771e158e30d5846e64e60 | [
"BSD-3-Clause"
] | 6 | 2016-11-22T04:53:37.000Z | 2018-11-15T13:56:39.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2017-12-06 13:50
from __future__ import unicode_literals
import aldryn_redirects.validators
from django.db import migrations, models
import django.db.models.deletion
| 50.020408 | 268 | 0.651571 |
0811b7481588bc53cfde102ac50bffe1f9e0e41c | 161 | py | Python | velocity/constants.py | aisthesis/mfstockmkt | d442ec4cb3b379f6984397926b4466420236c032 | [
"MIT"
] | null | null | null | velocity/constants.py | aisthesis/mfstockmkt | d442ec4cb3b379f6984397926b4466420236c032 | [
"MIT"
] | 1 | 2015-12-27T17:37:54.000Z | 2015-12-31T05:06:06.000Z | velocity/constants.py | aisthesis/mfstockmkt | d442ec4cb3b379f6984397926b4466420236c032 | [
"MIT"
] | 1 | 2020-05-02T08:25:35.000Z | 2020-05-02T08:25:35.000Z | """
.. Copyright (c) 2015 Marshall Farrier
license http://opensource.org/licenses/MIT
Constants
=========
"""
UPVEL_COL = 'Up Vel'
DOWNVEL_COL = 'Down Vel'
| 14.636364 | 45 | 0.652174 |
0811dfdcb7e741d544fe728950a10ae174c04263 | 3,284 | py | Python | fileForRepair/src/parking.py | ChangSeonKim/5G_UWC_project | 0504a1b1ed30787f30e18a178897978de55660ef | [
"Apache-2.0"
] | null | null | null | fileForRepair/src/parking.py | ChangSeonKim/5G_UWC_project | 0504a1b1ed30787f30e18a178897978de55660ef | [
"Apache-2.0"
] | null | null | null | fileForRepair/src/parking.py | ChangSeonKim/5G_UWC_project | 0504a1b1ed30787f30e18a178897978de55660ef | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python3
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
import numpy as np
import math
from std_msgs.msg import String
if __name__ =='__main__':
    # Stand-alone ROS node: publishes velocity commands on /cmd_vel and
    # subscribes to the laser scanner plus a custom string topic.
    rospy.init_node('parking')
    pub = rospy.Publisher('/cmd_vel',Twist, queue_size=10)
    # NOTE(review): ``callback`` and ``stop`` are not defined in the content
    # shown here -- presumably the scan handler and a stop-command handler;
    # confirm against the full file.
    rospy.Subscriber('/scan',LaserScan, queue_size = 1, callback = callback)
    rospy.Subscriber('helloworld03', String, callback=stop)
    # Hand control to ROS; blocks until the node is shut down.
    rospy.spin()
pass | 29.061947 | 76 | 0.546894 |
08138545899e44b68cb9f2c6902d9d5be0b380f7 | 2,622 | py | Python | opennsa/provreg.py | jmacauley/opennsa | 853c0fc8e065e74815cbc3f769939f64ac6aadeb | [
"BSD-3-Clause"
] | null | null | null | opennsa/provreg.py | jmacauley/opennsa | 853c0fc8e065e74815cbc3f769939f64ac6aadeb | [
"BSD-3-Clause"
] | null | null | null | opennsa/provreg.py | jmacauley/opennsa | 853c0fc8e065e74815cbc3f769939f64ac6aadeb | [
"BSD-3-Clause"
] | null | null | null | """
Registry for tracking providers dynamically in OpenNSA.
Keeping track of providers in a dynamical way in an NSI implementation is a
huge pain in the ass. This is a combination of things, such as seperate
identities and endpoints, callbacks, and the combination of local providers.
The class ProviderRegistry tries to keep it a bit sane.
"""
from twisted.python import log
from opennsa import error
LOG_SYSTEM = 'providerregistry'
| 34.051948 | 108 | 0.672006 |
081559dc3ab661ae3a1df9c2d52bc8d2ba1f2ae4 | 997 | py | Python | tests/test_task_tracker.py | jmchilton/shedclient-beta | 50041b488652f8bf40555b0c1ef001290f1c3f6a | [
"CC-BY-3.0"
] | 2 | 2015-12-21T02:18:54.000Z | 2016-09-08T13:56:36.000Z | tests/test_task_tracker.py | jmchilton/shedclient-beta | 50041b488652f8bf40555b0c1ef001290f1c3f6a | [
"CC-BY-3.0"
] | 1 | 2015-12-21T19:26:21.000Z | 2015-12-21T19:26:21.000Z | tests/test_task_tracker.py | jmchilton/shedclient-beta | 50041b488652f8bf40555b0c1ef001290f1c3f6a | [
"CC-BY-3.0"
] | null | null | null | from test_utils import TempDirectoryContext
from shedclient import task_tracker
| 33.233333 | 76 | 0.657974 |
081691097da1b52252fbbf22f08b3e7856a39982 | 5,825 | py | Python | gonder_rc.py | TarikCinar/python-sesli-asistan | 1a29a8d3081b67ff352cf03f7b01ac01b7118deb | [
"MIT"
] | 1 | 2021-05-28T17:27:50.000Z | 2021-05-28T17:27:50.000Z | gonder_rc.py | TarikCinar/python-sesli-asistan | 1a29a8d3081b67ff352cf03f7b01ac01b7118deb | [
"MIT"
] | null | null | null | gonder_rc.py | TarikCinar/python-sesli-asistan | 1a29a8d3081b67ff352cf03f7b01ac01b7118deb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.13.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x04\x1a\
\x00\
\x01\x08\x3e\x78\x9c\xed\x9c\x4d\x6e\xd3\x50\x14\x85\x1d\x65\x10\
\x66\x61\xc4\x2c\x75\x87\xdd\x85\x59\x4a\x77\x92\xcc\xd2\x59\x97\
\xc2\x12\x40\x62\x01\x2c\xa1\x48\xb4\x62\x58\x86\x0c\x10\xc1\x76\
\x52\xc7\x3e\x21\x8e\xe3\xdf\x77\xdf\xfb\xbe\xea\x22\x1d\x17\x61\
\xc7\xe7\xbe\x73\x18\x54\x8d\xa2\x59\xfa\xb5\xd9\x44\xe9\x9f\xb7\
\xd1\xdd\xbb\x59\xf4\x21\x8a\xa2\xbb\x74\xd2\x4b\xd9\xc5\xfc\x7a\
\x4e\xfa\x3d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x08\x97\x97\xed\x6a\xf3\xfc\xb0\x7a\x7a\xde\xae\xee\x9f\
\x1e\xe3\xf7\x53\x3f\x0f\x8c\x4b\xe6\xff\xcb\xc3\xcd\x2e\x9f\xed\
\xea\xf5\x79\x7b\xf3\xf8\x73\x1b\xdf\x4e\xfd\x5c\x30\x0e\x15\xff\
\x4b\x93\xee\xc1\xa7\x1f\xdb\xf8\xe3\xd4\xcf\x07\xc3\x72\xce\xff\
\x62\x0f\xe8\x06\xaf\xb9\xe4\x3f\xdd\xe0\x37\x8d\xfd\xa7\x1b\xbc\
\xa4\x8d\xff\x74\x83\x3f\x74\xf1\x9f\x6e\xb0\x4f\x2f\xfe\xd3\x0d\
\x66\xe9\xdb\x7f\xba\xc1\x16\x43\xf9\x4f\x37\xd8\x60\x70\xff\xe9\
\x06\xa7\x19\xd3\x7f\xba\xc1\x3d\xa6\xf0\x9f\x6e\x70\x87\x49\xfd\
\xa7\x1b\x26\xc7\x15\xff\xe9\x86\x69\x70\xcd\x7f\xba\x61\x5c\x9c\
\xf5\x9f\x6e\x18\x05\x0b\xfe\xd3\x0d\xc3\x61\xc9\x7f\xba\xa1\x7f\
\x4c\xfa\x5f\xce\x04\xba\xa1\x13\xd6\xfd\x2f\xf6\x80\x6e\x68\x85\
\x2f\xfe\x17\x43\x37\x5c\x85\x77\xfe\x97\x33\x81\x6e\xb8\x88\xcf\
\xfe\x17\x7b\x40\x37\x9c\x25\x04\xff\x8b\xa1\x1b\x4e\x08\xca\xff\
\x72\x26\xd0\x0d\x39\xa1\xfa\x5f\xec\x41\xe0\xdd\x10\xba\xff\xc5\
\x04\xda\x0d\xf8\x7f\x3a\x21\x75\x03\xfe\xd7\xec\x41\x00\xdd\x80\
\xff\x0d\xc6\xe3\x6e\xc0\xff\xeb\xc6\xb7\x6e\xc0\xff\x96\x7b\xe0\
\x49\x37\xe0\x7f\xc7\x31\xde\x0d\xf8\xdf\xdf\x58\xec\x06\xfc\x1f\
\x60\x0f\x0c\x75\x43\xb6\xaf\x59\x7e\xbd\x3c\xac\x3e\xa7\xbb\xf0\
\x6d\xea\x77\xe7\xd5\x18\xed\x86\xec\x79\xf7\x7b\xb1\xba\xcf\x7f\
\x3f\x58\x9a\x6b\x87\xfd\x78\x9d\xfc\x9d\x1a\x1d\x8b\xdd\x70\x8e\
\xec\x73\x64\x73\xd8\x0d\xb2\xe3\x9a\x3d\x30\xd4\x0d\x6d\x20\x3b\
\x1a\x8e\xd1\x6e\xe8\x0a\xd9\x71\x3a\x3e\x75\x43\x17\x42\xcf\x0e\
\xdf\xbb\xa1\x2b\xc1\x64\x47\xa0\xdd\xd0\x05\x5f\xb3\x83\x6e\xe8\
\x07\xeb\xd9\x41\x37\x0c\x87\xa9\xec\xa0\x1b\x46\xc7\xd5\xec\xa0\
\x1b\xc6\x01\xff\xfd\x86\xfc\xf7\x1f\x57\xcf\x70\xe3\xb3\xce\xff\
\xff\x6a\x31\x75\x86\xc9\xf8\x56\x58\x3f\xc3\x8d\x27\xd0\x8c\xf7\
\xf5\x0c\x37\x3e\xeb\x01\x64\x7c\x30\x67\xf8\x1a\xdf\x3d\xca\xf8\
\xd0\xcf\x70\xe3\x31\x9c\xf1\x9c\xe1\xf6\x63\x21\xe3\x39\xc3\x03\
\xf8\xee\x68\xc6\xf3\xf3\x9f\x03\x8e\x81\x8c\xe7\xe7\xbf\xfb\x1f\
\x0b\x19\xff\x06\xfe\xf7\xe8\xbb\xa3\x19\x5f\x07\xfe\x77\x1c\x03\
\x19\x5f\x07\xfe\xb7\x1b\x4b\x19\x5f\x07\xfe\x5f\xe9\xbb\xc1\x8c\
\xaf\x03\xff\x1b\x8c\xf1\x8c\xaf\x03\xff\xcf\x8f\x2f\x19\x5f\x07\
\xfe\xff\xc7\x77\xcf\x32\xbe\x0e\xfc\x3f\x8c\xc7\x19\x5f\x47\xe8\
\xfe\x87\x90\xf1\x75\x84\xea\x7f\x48\x19\x5f\x47\x50\xfe\x07\x9a\
\xf1\x75\x84\xe0\x7f\xe8\x19\x5f\x87\xcf\xfe\x93\xf1\x97\xf1\xce\
\x7f\x32\xfe\x2a\x7c\xf1\x9f\x8c\x6f\x87\x75\xff\xc9\xf8\x6e\x98\
\xf4\x9f\x8c\xef\x0d\x4b\xfe\x93\xf1\xfd\x63\xc1\x7f\x32\x7e\x38\
\x9c\xf5\x9f\x8c\x1f\x05\xd7\xfc\x27\xe3\xc7\xc5\x15\xff\xc9\xf8\
\x69\x98\xd4\x7f\x32\x7e\x72\xa6\xf0\x9f\x8c\x77\x87\x31\xfd\x27\
\xe3\xdd\x63\x70\xff\xc9\x78\xa7\x19\xca\x7f\x32\xde\x06\x7d\xfb\
\x4f\xc6\xdb\xa2\x17\xff\xc9\x78\xb3\x74\xf1\x9f\x8c\xb7\x4f\x1b\
\xff\xc9\x78\x7f\x68\xec\x3f\x19\xef\x25\x97\xfc\x27\xe3\xfd\xe6\
\x9c\xff\x64\x7c\x18\x54\xfc\x27\xe3\x83\x23\xff\xfd\x5e\x64\x3c\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\
\x73\xec\x42\xe7\xab\xe8\x2f\xaa\x13\xd1\x0b\xd1\x33\xd1\xd1\x5a\
\xf4\x52\xf4\x5c\x74\xa4\x3a\x16\xbd\x10\x3d\x13\x1d\x25\xa2\x97\
\xa2\xe7\xa2\xcb\x8f\x98\xeb\x58\xf4\x42\x74\xa4\x3a\x11\xbd\x14\
\x3d\x13\x7d\xbc\xe3\x41\xc7\xa2\xe7\xa2\x23\xd5\x89\xe8\x85\xe8\
\x99\xe8\xb7\x3b\x16\x7a\x29\x7a\x2e\x3a\x52\x1d\x8b\x5e\x88\x9e\
\x89\xde\x3f\x62\x49\xe7\x77\xfc\x7b\xd4\xfb\x3b\x96\x2e\xec\x1f\
\xf1\x8f\xdc\xf1\x78\xe1\xed\x33\xfe\xae\x3e\xe2\xf1\x42\x61\xc3\
\xaf\xca\x67\x2c\x2e\x94\x36\xe5\x7b\xf9\xa5\x14\x17\xe2\xb3\x5a\
\xfe\xbe\xfc\x7b\x72\x3f\x79\x1e\x79\x5e\xf9\x3c\xf2\x79\xe5\x7d\
\xe8\xfb\xd2\xf7\x59\xd2\xf2\xbe\xd5\x0f\xf5\x2b\x16\xbd\xab\x6a\
\xdd\x07\xdd\x97\x75\x55\xeb\xbe\xe9\x3e\x26\xa2\x77\x55\xad\xfb\
\x1e\x8b\x5e\x57\xb5\x9e\x27\x3d\x6f\x89\xe8\x5d\x55\xeb\x79\x8e\
\x45\xaf\xab\x5a\xf3\x42\xf3\x24\xa9\x6a\xcd\x23\xcd\xab\x58\xf4\
\xae\xaa\x35\x0f\x35\x2f\xd7\x55\xad\x79\xab\x79\x9c\x88\xae\xca\
\x93\xbc\x0f\x9c\x7f\x31\x73\xbc\x32\
"
qt_resource_name = b"\
\x00\x09\
\x0c\x78\x54\x88\
\x00\x6e\
\x00\x65\x00\x77\x00\x50\x00\x72\x00\x65\x00\x66\x00\x69\x00\x78\
\x00\x0a\
\x0a\xc8\x83\x1f\
\x00\x67\
\x00\x6f\x00\x6e\x00\x64\x00\x65\x00\x72\x00\x2e\x00\x69\x00\x63\x00\x6f\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x18\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x18\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x6e\xda\x88\xff\x59\
"
# Auto-generated by pyrcc5 (see the header warning: edits are lost on
# regeneration).  Pick the resource-struct layout that matches the Qt
# runtime: Qt >= 5.8 understands version 2 of the rcc data format, older
# runtimes get the version 1 struct.
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
    rcc_version = 1
    qt_resource_struct = qt_resource_struct_v1
else:
    rcc_version = 2
    qt_resource_struct = qt_resource_struct_v2
# NOTE(review): ``qInitResources`` is not defined in the content shown here;
# pyrcc5 normally generates it in this same file -- confirm.
qInitResources()
| 47.357724 | 103 | 0.72721 |
081745d5d369bb388f32e0870139795f7243852c | 213 | py | Python | tuples-and-sets/3_unique_names.py | Minkov/python-advanced-2020-01 | f2ee26f1325d943529673457a1cbba5657ae5905 | [
"MIT"
] | 5 | 2020-01-16T18:17:08.000Z | 2020-04-12T06:42:47.000Z | tuples-and-sets/3_unique_names.py | Minkov/python-advanced-2020-01 | f2ee26f1325d943529673457a1cbba5657ae5905 | [
"MIT"
] | null | null | null | tuples-and-sets/3_unique_names.py | Minkov/python-advanced-2020-01 | f2ee26f1325d943529673457a1cbba5657ae5905 | [
"MIT"
] | null | null | null | n = int(input())
# names = {input() for _ in range(n)}
names = []
for _ in range(n):
names.append(input())
unique_names = list(set(names))
[print(name)
for name
in sorted(unique_names, key=names.index)]
| 15.214286 | 42 | 0.643192 |
081c3a4e5b548789411fc11be988031444e552dd | 233 | py | Python | dialogue/tensorflow/task/common/common.py | ishine/nlp-dialogue | d47c1438cb5c45c2c2aebfb82fea92bef4c3d65c | [
"Apache-2.0"
] | 478 | 2020-10-28T01:30:30.000Z | 2022-03-30T06:34:07.000Z | paper-code/tensorflow_src/models/task/common/common.py | HengYongChao/nlp-paper | fcf985e3c9bfd6944d07c4c36afbaee3384d040d | [
"Apache-2.0"
] | 1 | 2021-08-29T11:55:09.000Z | 2021-11-04T09:25:19.000Z | paper-code/tensorflow_src/models/task/common/common.py | HengYongChao/nlp-paper | fcf985e3c9bfd6944d07c4c36afbaee3384d040d | [
"Apache-2.0"
] | 89 | 2021-01-05T06:11:55.000Z | 2022-03-24T12:51:57.000Z | from optparse import OptionParser
| 19.416667 | 39 | 0.622318 |
081da69448bb7e8d65c5e3d690d670101f274a22 | 587 | py | Python | posts/migrations/0003_auto_20200522_0446.py | NotBlizzard/lark | b348f8d2b532ce20581030397cbba7f6565d1c56 | [
"MIT"
] | 2 | 2020-12-10T06:13:36.000Z | 2021-01-15T09:32:41.000Z | posts/migrations/0003_auto_20200522_0446.py | NotBlizzard/lark | b348f8d2b532ce20581030397cbba7f6565d1c56 | [
"MIT"
] | 7 | 2021-03-10T21:21:55.000Z | 2021-09-22T19:20:03.000Z | posts/migrations/0003_auto_20200522_0446.py | NotBlizzard/lark | b348f8d2b532ce20581030397cbba7f6565d1c56 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-05-22 04:46
from django.db import migrations, models
| 23.48 | 63 | 0.574106 |
081ea6e893da219c4f8a085a91c4473a4ef03166 | 191 | py | Python | tasking/utils/models.py | cmanallen/tasking | 0a613357148afdb4b55078c58429ec45bb60a436 | [
"MIT"
] | null | null | null | tasking/utils/models.py | cmanallen/tasking | 0a613357148afdb4b55078c58429ec45bb60a436 | [
"MIT"
] | null | null | null | tasking/utils/models.py | cmanallen/tasking | 0a613357148afdb4b55078c58429ec45bb60a436 | [
"MIT"
] | null | null | null | from django.db import models
| 21.222222 | 50 | 0.78534 |
08212ae6445b938c3145af03c666f1c2c0d5163b | 439 | py | Python | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/conftest.py | SirTelemak/cookiecutter-python-template | d7d8c4493250654a4ee3badb36c4c4da1ccb8d3d | [
"MIT"
] | 2 | 2020-06-04T19:17:13.000Z | 2020-06-05T08:05:16.000Z | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/conftest.py | SirTelemak/cookiecutter-python-template | d7d8c4493250654a4ee3badb36c4c4da1ccb8d3d | [
"MIT"
] | 1 | 2020-08-06T15:01:47.000Z | 2020-08-06T15:01:47.000Z | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/conftest.py | SirTelemak/cookiecutter-python-template | d7d8c4493250654a4ee3badb36c4c4da1ccb8d3d | [
"MIT"
] | 2 | 2020-06-15T19:26:33.000Z | 2020-11-20T20:24:03.000Z | import logging
import pytest
from loguru import logger
| 21.95 | 84 | 0.708428 |
08217e660e94837e28763173bb72fbc25fe9ee5e | 216 | py | Python | locale/pot/api/plotting/_autosummary/pyvista-Plotter-enable_lightkit-1.py | tkoyama010/pyvista-doc-translations | 23bb813387b7f8bfe17e86c2244d5dd2243990db | [
"MIT"
] | 4 | 2020-08-07T08:19:19.000Z | 2020-12-04T09:51:11.000Z | locale/pot/api/plotting/_autosummary/pyvista-Plotter-enable_lightkit-1.py | tkoyama010/pyvista-doc-translations | 23bb813387b7f8bfe17e86c2244d5dd2243990db | [
"MIT"
] | 19 | 2020-08-06T00:24:30.000Z | 2022-03-30T19:22:24.000Z | locale/pot/api/plotting/_autosummary/pyvista-Plotter-enable_lightkit-1.py | tkoyama010/pyvista-doc-translations | 23bb813387b7f8bfe17e86c2244d5dd2243990db | [
"MIT"
] | 1 | 2021-03-09T07:50:40.000Z | 2021-03-09T07:50:40.000Z | # Create a plotter without any lights and then enable the
# default light kit.
#
import pyvista
pl = pyvista.Plotter(lighting=None)
pl.enable_lightkit()
actor = pl.add_mesh(pyvista.Cube(), show_edges=True)
pl.show()
| 24 | 57 | 0.763889 |
0822f1091f07394bee07ab3fa63b7142aa217e7c | 1,353 | py | Python | sphinx/environment/managers/__init__.py | rweickelt/sphinx | 1a4c41a7691e8f78d42e2db221192962c53b27df | [
"BSD-2-Clause"
] | null | null | null | sphinx/environment/managers/__init__.py | rweickelt/sphinx | 1a4c41a7691e8f78d42e2db221192962c53b27df | [
"BSD-2-Clause"
] | null | null | null | sphinx/environment/managers/__init__.py | rweickelt/sphinx | 1a4c41a7691e8f78d42e2db221192962c53b27df | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
sphinx.environment.managers
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Manager components for sphinx.environment.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
if False:
# For type annotation
from typing import Any # NOQA
from docutils import nodes # NOQA
from sphinx.environment import BuildEnvironment # NOQA
| 26.529412 | 68 | 0.604582 |
0822f39156313d04e61ff6ddaaed66e14edc3a38 | 3,692 | py | Python | scripts/convert_queries.py | galuscakova/podcasts | 967cc04e2b0f7cf963a189ac5270cfa69f81a540 | [
"BSD-4-Clause-UC"
] | null | null | null | scripts/convert_queries.py | galuscakova/podcasts | 967cc04e2b0f7cf963a189ac5270cfa69f81a540 | [
"BSD-4-Clause-UC"
] | null | null | null | scripts/convert_queries.py | galuscakova/podcasts | 967cc04e2b0f7cf963a189ac5270cfa69f81a540 | [
"BSD-4-Clause-UC"
] | 1 | 2021-05-27T07:44:51.000Z | 2021-05-27T07:44:51.000Z | import getopt
import sys
import os
import re
import string
import xml.etree.ElementTree as ET
input_filename = ""
expansion_filename = ""
output_type = "combine"
exclude = set(string.punctuation)
options, remainder = getopt.getopt(sys.argv[1:], 'i:e:t:', ['inputfile=', 'expansionfile=', 'type='])
for opt, arg in options:
if opt in ('-i', '--inputfile'):
input_filename = arg
if (not os.path.exists(input_filename)):
sys.exit("Error: Inputfile does not exists")
if opt in ('-e', '--expansionfile'):
expansion_filename = arg
if (not os.path.exists(expansion_filename)):
sys.exit("Error: Expansion file does not exists")
if opt in ('-t', '--type'):
output_type = arg
expansion_terms = []
if (expansion_filename != ""):
with open(expansion_filename) as expandfile:
expansion_terms = expandfile.readlines()
xml_root = ET.parse(input_filename)
print("<parameters>")
order = 0
for topic in xml_root.findall('.//topic'):
num = topic.find('num').text
query = topic.find('query').text
description = topic.find('description').text
query = query.replace('-', ' ')
query = query.replace('\n', ' ')
description = description.replace('-', ' ')
description = description.replace('\n', ' ')
query = query.translate(str.maketrans('', '', string.punctuation))
description = description.translate(str.maketrans('', '', string.punctuation))
print("<query>")
print("<number>" + str(num) + "</number>")
expansion = ""
if ( expansion_filename != ""):
line_expansion_term = expansion_terms[order]
line_expansion_term = line_expansion_term.replace("[", "")
line_expansion_term = line_expansion_term.replace("]", "")
line_expansion_term = line_expansion_term.replace('"', "")
line_expansion_term = line_expansion_term.replace('\n',"")
line_expansion_terms = line_expansion_term.split(',')
expansion = " "
max_expansion_terms = 10
for i in range (min(max_expansion_terms, len(line_expansion_terms))):
if (':' in line_expansion_terms[i]):
term,score = line_expansion_terms[i].split(':')
score = score.replace("\n", "")
if (output_type == "weights"):
expansion = expansion + str(score) + " #combine(" + term + ") "
else:
expansion = expansion + term
expansion = expansion + " "
if (output_type == "combine"):
print("<text>#combine(" + query + " " + expansion + description + ")</text>")
if (output_type == "weights"):
print("<text>#weight( 1.0 #combine(" + query + ") " + expansion + " 0.5 #combine(" + description + "))</text>")
if (output_type == "terms"):
print("<text>" + query + " " + expansion + description + "</text>")
if (output_type == "sdm"):
query_sdm = get_sdm_query(query)
description_sdm = get_sdm_query(description)
print("<text>#weight(" + query_sdm + " " + description_sdm + ")</text>")
print("</query>")
order += 1
print("</parameters>")
| 34.830189 | 119 | 0.600488 |
08235f7faf3865296eebd91470431d320d7b228e | 370 | py | Python | createGlobalMap.py | abhi20sc/autoClim | b131a19e935e8ba7778a2c73107a183df37e92da | [
"MIT"
] | 2 | 2021-07-28T05:58:20.000Z | 2021-08-16T18:27:27.000Z | createGlobalMap.py | abhi20sc/autoClim | b131a19e935e8ba7778a2c73107a183df37e92da | [
"MIT"
] | null | null | null | createGlobalMap.py | abhi20sc/autoClim | b131a19e935e8ba7778a2c73107a183df37e92da | [
"MIT"
] | 3 | 2021-08-05T15:21:05.000Z | 2021-10-04T03:42:16.000Z | import cartopy.crs as ccrs
import cartopy.feature as cf
from matplotlib import pyplot as plt
from matplotlib import image as img | 30.833333 | 64 | 0.767568 |
0823b5eeb8c1036e06aae43d61945a3ec0226291 | 2,124 | py | Python | tests/decloud_unittest.py | CNES/decloud | 6b06ae98bfe68821b4ebd0e7ba06723809cb9b42 | [
"Apache-2.0"
] | 8 | 2022-02-25T13:15:07.000Z | 2022-03-20T18:29:49.000Z | tests/decloud_unittest.py | CNES/decloud | 6b06ae98bfe68821b4ebd0e7ba06723809cb9b42 | [
"Apache-2.0"
] | 1 | 2022-02-25T13:21:33.000Z | 2022-02-25T13:21:33.000Z | tests/decloud_unittest.py | CNES/decloud | 6b06ae98bfe68821b4ebd0e7ba06723809cb9b42 | [
"Apache-2.0"
] | 1 | 2022-03-31T23:43:12.000Z | 2022-03-31T23:43:12.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import subprocess
import unittest
import filecmp
import gdal
import otbApplication as otb
from abc import ABC
from decloud.core.system import get_env_var, pathify, basename
| 38.618182 | 107 | 0.672787 |
08248ac0b1e2686f247d443d0208fc7018480282 | 1,327 | py | Python | test/test_merge.py | tawiesn/sclblonnx | 0cf73112db5df13009cd2ddb5d49744689096209 | [
"MIT"
] | null | null | null | test/test_merge.py | tawiesn/sclblonnx | 0cf73112db5df13009cd2ddb5d49744689096209 | [
"MIT"
] | null | null | null | test/test_merge.py | tawiesn/sclblonnx | 0cf73112db5df13009cd2ddb5d49744689096209 | [
"MIT"
] | null | null | null | from sclblonnx import add_output, add_input, add_node, node, empty_graph, add_constant, display, merge, run
import numpy as np
| 34.025641 | 107 | 0.605124 |
08248cc60a1189c226093e9c782fd70e1acdd43e | 2,609 | py | Python | src/cameraCalibrator.py | mdaros2016/CarND-Advanced-Lane-Lines | b27d57f1c6730f302f18fb6b8cbbfcb9361d57bf | [
"MIT"
] | null | null | null | src/cameraCalibrator.py | mdaros2016/CarND-Advanced-Lane-Lines | b27d57f1c6730f302f18fb6b8cbbfcb9361d57bf | [
"MIT"
] | null | null | null | src/cameraCalibrator.py | mdaros2016/CarND-Advanced-Lane-Lines | b27d57f1c6730f302f18fb6b8cbbfcb9361d57bf | [
"MIT"
] | null | null | null | import glob
import cv2
import numpy as np
| 38.367647 | 143 | 0.63166 |
0827c8ec658edf16eba00017e1a771b5d2f84def | 591 | py | Python | nicos_ess/dream/setups/beam_monitor.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 12 | 2019-11-06T15:40:36.000Z | 2022-01-01T16:23:00.000Z | nicos_ess/dream/setups/beam_monitor.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 91 | 2020-08-18T09:20:26.000Z | 2022-02-01T11:07:14.000Z | nicos_ess/dream/setups/beam_monitor.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 6 | 2020-01-11T10:52:30.000Z | 2022-02-25T12:35:23.000Z | description = 'Instrument shutter'
prefix = "IOC"
devices = dict(
beam_monitor_1=device(
'nicos_ess.devices.epics.motor.EpicsMotor',
description="Beam monitor continuous position feedback",
motorpv=f'{prefix}:m8',
abslimits=(-10, 10),
unit='mm',
speed=5.,
),
beam_monitor_switch=device(
'nicos.devices.generic.Switcher',
description="Toggles between in and out of the beam",
moveable="beam_monitor_1",
mapping={
'IN': 0,
'OUT': 5,
},
precision=0.01,
)
)
| 24.625 | 64 | 0.566836 |
0829534c63fae0dfb66814593c9605ce70347325 | 28,509 | py | Python | biosteam/_system.py | tylerhuntington222/biosteam | 234959180a3210d95e39a012454f455723c92686 | [
"MIT"
] | null | null | null | biosteam/_system.py | tylerhuntington222/biosteam | 234959180a3210d95e39a012454f455723c92686 | [
"MIT"
] | null | null | null | biosteam/_system.py | tylerhuntington222/biosteam | 234959180a3210d95e39a012454f455723c92686 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# BioSTEAM: The Biorefinery Simulation and Techno-Economic Analysis Modules
# Copyright (C) 2020, Yoel Cortes-Pena <yoelcortes@gmail.com>
#
# This module is under the UIUC open-source license. See
# github.com/BioSTEAMDevelopmentGroup/biosteam/blob/master/LICENSE.txt
# for license details.
"""
"""
import flexsolve as flx
from .digraph import (digraph_from_units_and_streams,
minimal_digraph,
surface_digraph,
finalize_digraph)
from thermosteam import Stream
from thermosteam.utils import registered
from .exceptions import try_method_with_object_stamp
from ._network import Network
from ._facility import Facility
from ._unit import Unit
from .report import save_report
from .exceptions import InfeasibleRegion
from .utils import colors, strtuple
import biosteam as bst
__all__ = ('System',)
# %% Functions for taking care of numerical specifications within a system path
# %% Debugging and exception handling
def _evaluate(self, command=None):
"""
Evaluate a command and request user input for next command.
If no command, return. This function is used for debugging a System object.
"""
# Done evaluating if no command, exit debugger if 'exit'
if command is None:
Next = colors.next('Next: ') + f'{repr(self)}\n'
info = colors.info("Enter to continue or type to evaluate:\n")
command = input(Next + info + ">>> ")
if command == 'exit': raise KeyboardInterrupt()
if command:
# Build locals dictionary for evaluating command
F = bst.main_flowsheet
lcs = {self.ID: self, 'bst': bst,
**F.system.__dict__,
**F.stream.__dict__,
**F.unit.__dict__,
**F.flowsheet.__dict__
}
try:
out = eval(command, {}, lcs)
except Exception as err:
# Print exception and ask to raise error or continue evaluating
err = colors.exception(f'{type(err).__name__}:') + f' {str(err)}\n\n'
info = colors.info(f"Enter to raise error or type to evaluate:\n")
command = input(err + info + ">>> ")
if command == '': raise err
_evaluate(self, command)
else:
# If successful, continue evaluating
if out is None: pass
elif (not hasattr(out, '_ipython_display_')
or isinstance(out, type)): print(out)
else: out._ipython_display_()
command = input(">>> ")
_evaluate(self, command)
def _method_debug(self, func):
"""Method decorator for debugging system."""
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
wrapper._original = func
return wrapper
def _notify_run_wrapper(self, func):
"""Decorate a System run method to notify you after each loop"""
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
wrapper._original = func
return wrapper
# %% Process flow
from biosteam import _flowsheet as flowsheet_module | 35.725564 | 128 | 0.582377 |
082bb5b00799a75a854f5404ce105bcaeac6c3e7 | 1,005 | py | Python | modules/AI/research/findContour.py | killax-d/Counter-Coins-API | 97acede70e26b23f96883bb14e2bf6ace3759174 | [
"MIT"
] | null | null | null | modules/AI/research/findContour.py | killax-d/Counter-Coins-API | 97acede70e26b23f96883bb14e2bf6ace3759174 | [
"MIT"
] | null | null | null | modules/AI/research/findContour.py | killax-d/Counter-Coins-API | 97acede70e26b23f96883bb14e2bf6ace3759174 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
image = cv2.imread('original.png')
gray = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2GRAY)
gray = cv2.equalizeHist(gray)
blur = cv2.GaussianBlur(gray, (19, 19), 0)
# Application d'un seuil pour obtenir une image binaire
thresh = cv2.adaptiveThreshold(blur, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, 11, 1)
kernel = np.ones((3, 3), np.uint8)
# Application d'rosion et d'ouverture pour supprimer les contours de petites pices
closing = cv2.morphologyEx(thresh, cv2.MORPH_CLOSE, kernel, iterations=1)
contours, hierarchy = cv2.findContours(closing.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
for contour in contours:
area = cv2.contourArea(contour)
if area < 10000 or area > 50000:
continue
print(area)
if len(contour) < 5:
continue
try:
ellipse = cv2.fitEllipse(contour)
cv2.ellipse(image, ellipse, (0,255,0), 2)
except:
pass
# ecriture de l'image
cv2.imwrite('result.png', image) | 30.454545 | 103 | 0.711443 |
083461c10e66e08e6e0c8ad2d8f84b46b0b09e65 | 8,413 | py | Python | python/src/ties/cli/test/ties_convert_tests.py | Noblis/ties-lib | e7c6165ebcd80e11b792fd4bcddf6ce634da0c60 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-04-10T19:02:27.000Z | 2020-04-10T19:02:27.000Z | python/src/ties/cli/test/ties_convert_tests.py | Noblis/ties-lib | e7c6165ebcd80e11b792fd4bcddf6ce634da0c60 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | python/src/ties/cli/test/ties_convert_tests.py | Noblis/ties-lib | e7c6165ebcd80e11b792fd4bcddf6ce634da0c60 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | ################################################################################
# Copyright 2019 Noblis, Inc #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
################################################################################
import json
import os
import unittest
from stat import S_IRUSR
from tempfile import mkstemp
from unittest import TestCase
from ties.cli.ties_convert import main
from ties.util.testing import cli_test
short_usage = """\
usage: ties-convert [-h] [--classification-level SECURITY_TAG]
[--output-file OUTPUT_FILE | --in-place] [--version]
EXPORT_PATH"""
long_usage = """\
{}
Converts TIES export.json files from older versions of the schema (0.1.8, 0.2,
0.3, 0.4, 0.5, 0.6, 0.7, 0.8) to the current version (0.9).
positional arguments:
EXPORT_PATH the path to the TIES JSON file or - to read from stdin
optional arguments:
-h, --help show this help message and exit
--classification-level SECURITY_TAG, -c SECURITY_TAG
the classification level of the TIES JSON, required
for TIES JSON from pre-0.3 versions of the schema
--output-file OUTPUT_FILE, -f OUTPUT_FILE
the output file path for the converted TIES JSON
--in-place, -i modifies the input file in-place, overwriting it with
the converted JSON data
--version prints version information
""".format(short_usage)
test_input = """\
{
"version": "0.1.8",
"objectItem": [
{
"sha256Hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"md5Hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
}"""
test_output = """\
{
"version": "0.9",
"securityTag": "UNCLASSIFIED",
"objectItems": [
{
"objectId": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sha256Hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"md5Hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"authorityInformation": {
"securityTag": "UNCLASSIFIED"
}
}
]
}"""
if __name__ == '__main__':
unittest.main()
| 36.578261 | 111 | 0.562463 |
0834a96e609f196a4e397fc0d0398ea157ccd7e5 | 2,316 | py | Python | Edge Detection.py | paulmtree/Lung-Segmentation-Project | 2cffe09ce6a4818200d88b9e4e87155feb594366 | [
"MIT"
] | 14 | 2020-11-10T16:47:54.000Z | 2022-03-15T12:17:29.000Z | Edge Detection.py | paulmtree/Lung-Segmentation-Project | 2cffe09ce6a4818200d88b9e4e87155feb594366 | [
"MIT"
] | 3 | 2020-11-21T09:49:15.000Z | 2021-05-30T23:58:30.000Z | Edge Detection.py | paulmtree/Lung-Segmentation-Project | 2cffe09ce6a4818200d88b9e4e87155feb594366 | [
"MIT"
] | 3 | 2021-11-04T18:08:53.000Z | 2022-01-13T03:22:26.000Z | from PIL import Image, ImageFilter
import numpy as np
import glob
from numpy import array
import matplotlib.pyplot as plt
from skimage import morphology
import scipy.ndimage
"""
datapath = "jpg_images/"
img0 = Image.open("jpg_images/maskedimage" + str(0) + ".jpg")
counter = 0
img1 = []
for f in glob.glob('/Users/paulmccabe/Desktop/jpg images/*.jpg'):
path = "jpg_images/maskedimage" + str(counter) + ".jpg"
img0 = Image.open(path).convert('L')
img1.append(array(img0))
counter += 1
print("Counter: " + str(counter))
imgs_to_process_orig = np.stack([s for s in img1])
"""
id = 2
imgs = np.load("/Users/paulmccabe/Desktop/Segmentation Project/" + "justmask_%d.npy" % (id))
counter = 0
print("Saving as jpg Images...")
for img in imgs:
scipy.misc.imsave('/Users/paulmccabe/Desktop/Segmentation Project' + '/jpg mask images/justmask{}.jpg'.format(counter), img)
counter += 1
counter = 0
#print("Re-Importing jpg Images...")
#for f in glob.glob('/Users/paulmccabe/Desktop/Segmentation Project/jpg mask images/*.jpg'):
# path = "jpg_images/maskedimage" + str(counter) + ".jpg"
# img0 = Image.open(path).convert('L')
# img1.append(array(img0))
# counter += 1
imgs[imgs == 1] = 255
list = []
for img in imgs:
PIL_img = Image.fromarray(img.astype('uint8'))
PIL_edge = PIL_img.filter(ImageFilter.FIND_EDGES)
np_img = array(PIL_edge)
dilation = morphology.dilation(np_img, np.ones([4,4]))
list.append(dilation)
imgs_after_processing = np.stack([s for s in list])
np.save("/Users/paulmccabe/Desktop/Segmentation Project" + "/justedge_%d.npy" % (id), imgs_after_processing[:284])
#sample_stack(np_img) | 35.090909 | 128 | 0.658895 |
08354cb83dbefe75aa87b426bfa4c3e544572c47 | 2,191 | py | Python | benchmark.py | Umass-ITS/Open3D-PointNet2-Semantic3D | 0254926f62cbca695aa1e76a18fec0863be5e455 | [
"MIT"
] | 330 | 2019-04-10T21:31:24.000Z | 2021-07-26T06:16:17.000Z | benchmark.py | largeword/Open3D-PointNet2-Semantic3D | 3a9751dc724877933fc883320100796cef23489d | [
"MIT"
] | 44 | 2019-04-10T15:28:36.000Z | 2021-06-22T17:39:05.000Z | benchmark.py | largeword/Open3D-PointNet2-Semantic3D | 3a9751dc724877933fc883320100796cef23489d | [
"MIT"
] | 78 | 2019-04-08T09:39:29.000Z | 2021-06-08T02:39:14.000Z | import json
import numpy as np
import tensorflow as tf
import time
from predict import Predictor
if __name__ == "__main__":
checkpoint = "logs/semantic_backup_full_submit_dec_10/best_model_epoch_275.ckpt"
hyper_params = json.loads(open("semantic.json").read())
predictor = Predictor(
checkpoint_path=checkpoint, num_classes=9, hyper_params=hyper_params
)
batch_size = 64
# Init data
points_with_colors = np.random.randn(batch_size, hyper_params["num_point"], 6)
# Warm up
pd_labels = predictor.predict(points_with_colors)
# Benchmark
s = time.time()
profiler = tf.profiler.Profiler(predictor.sess.graph)
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
_ = predictor.predict(
points_with_colors, run_options=run_options, run_metadata=run_metadata
)
profiler.add_step(0, run_metadata)
batch_time = time.time() - s
sample_time = batch_time / batch_size
print(
"Batch size: {}, batch_time: {}, sample_time: {}".format(
batch_size, batch_time, sample_time
)
)
option_builder = tf.profiler.ProfileOptionBuilder
opts = (
option_builder(option_builder.time_and_memory())
.with_step(-1) # with -1, should compute the average of all registered steps.
.with_file_output("tf-profile.txt")
.select(["micros", "bytes", "occurrence"])
.order_by("micros")
.build()
)
# Profiling info about ops are saved in 'test-%s.txt' % FLAGS.out
profiler.profile_operations(options=opts)
for batch_size in [2 ** n for n in range(8)]:
# Init data
points_with_colors = np.random.randn(batch_size, hyper_params["num_point"], 6)
# Warm up
pd_labels = predictor.predict(points_with_colors)
# Benchmark
s = time.time()
_ = predictor.predict(points_with_colors)
batch_time = time.time() - s
sample_time = batch_time / batch_size
print(
"Batch size: {}, batch_time: {}, sample_time: {}".format(
batch_size, batch_time, sample_time
)
)
| 30.013699 | 86 | 0.652214 |
0836babd9f72c506519d713c961b9257fd759c19 | 447 | py | Python | tests/my_select_group.py | oldjun/PyMyORM | ac49910f21d3f3d3d4b3d75a0f998526963f0a2a | [
"MIT"
] | 1 | 2021-12-01T23:47:24.000Z | 2021-12-01T23:47:24.000Z | tests/my_select_group.py | oldjun/PyMyORM | ac49910f21d3f3d3d4b3d75a0f998526963f0a2a | [
"MIT"
] | null | null | null | tests/my_select_group.py | oldjun/PyMyORM | ac49910f21d3f3d3d4b3d75a0f998526963f0a2a | [
"MIT"
] | 2 | 2022-01-03T15:03:37.000Z | 2022-02-16T09:00:58.000Z | from pymyorm.database import Database
from config import db
from models.user import User
if __name__ == '__main__':
Database.connect(**db)
# # case 1
# all = User.find().select('count(*) as count', 'money').group('money').order('count asc').all()
# for one in all:
# print(one)
all = User.find().select('gender', 'count(*) as count', 'avg(money) as avg').group('gender').all()
for one in all:
print(one)
| 27.9375 | 102 | 0.615213 |
083725212ef9f198c79212406fcc54599eb1abb4 | 2,783 | py | Python | framework/codejam/extract/cyclomatic_complexity.py | neizod/coding-analysis | cc086bcf204e570032d11b12a46ac819cfe93f2b | [
"MIT"
] | 1 | 2015-05-22T05:01:53.000Z | 2015-05-22T05:01:53.000Z | framework/codejam/extract/cyclomatic_complexity.py | neizod/coding-analysis | cc086bcf204e570032d11b12a46ac819cfe93f2b | [
"MIT"
] | null | null | null | framework/codejam/extract/cyclomatic_complexity.py | neizod/coding-analysis | cc086bcf204e570032d11b12a46ac819cfe93f2b | [
"MIT"
] | null | null | null | import os
import json
import logging
from framework._utils import FunctionHook
| 42.815385 | 75 | 0.606899 |
08384f79281339a1e0387a70c9f20061ae7f5d42 | 64 | py | Python | tests/basic/numeric.py | MoonStarCZW/py2rb | 89b247717d33d780fbf143e1583bfe9252984da4 | [
"MIT"
] | null | null | null | tests/basic/numeric.py | MoonStarCZW/py2rb | 89b247717d33d780fbf143e1583bfe9252984da4 | [
"MIT"
] | null | null | null | tests/basic/numeric.py | MoonStarCZW/py2rb | 89b247717d33d780fbf143e1583bfe9252984da4 | [
"MIT"
] | null | null | null | print(int(2.0))
print(float(2))
print(abs(-2.0))
print(abs(-2))
| 12.8 | 16 | 0.625 |
083ac0bbfaedec44e83a000de5fcb0cfa49ed48e | 310 | py | Python | tests/symmetry/test_point_group.py | kijanac/Materia | b49af518c8eff7d3a8c6caff39783e3daf80a7a0 | [
"MIT"
] | null | null | null | tests/symmetry/test_point_group.py | kijanac/Materia | b49af518c8eff7d3a8c6caff39783e3daf80a7a0 | [
"MIT"
] | null | null | null | tests/symmetry/test_point_group.py | kijanac/Materia | b49af518c8eff7d3a8c6caff39783e3daf80a7a0 | [
"MIT"
] | null | null | null | # import materia as mtr
# import numpy as np
# def test_point_group_C1():
# ctable = mtr.symmetry.C1().cayley_table()
# assert (ctable == np.array([[0]])).all()
# def test_point_group_Ci():
# ctable = mtr.symmetry.Ci().cayley_table()
# assert (ctable == np.array([[0, 1], [1, 0]])).all()
| 20.666667 | 57 | 0.606452 |
083e03b527a87a9ebea41c58c4a9944e76e7007f | 1,948 | py | Python | extrator/test/test_pipeline.py | MinisterioPublicoRJ/robotj | 946e9547eea6f548609f7ccfaf1c6a13fffece65 | [
"MIT"
] | 3 | 2018-03-13T12:17:13.000Z | 2021-04-18T19:55:04.000Z | extrator/test/test_pipeline.py | MinisterioPublicoRJ/robotj | 946e9547eea6f548609f7ccfaf1c6a13fffece65 | [
"MIT"
] | 1 | 2018-06-19T13:09:10.000Z | 2018-06-19T13:09:10.000Z | extrator/test/test_pipeline.py | MinisterioPublicoRJ/robotj | 946e9547eea6f548609f7ccfaf1c6a13fffece65 | [
"MIT"
] | 1 | 2021-04-18T19:55:09.000Z | 2021-04-18T19:55:09.000Z | from unittest.mock import patch, MagicMock
from unittest import TestCase
from ..crawler.pipeliner import pipeline
from ..settings import URL_PROCESSO
| 38.96 | 76 | 0.661704 |
083f30db4f011f2e287409fe5ae43ef0e966b47a | 3,943 | py | Python | tests/test_step.py | arup-group/mc | 50b8faa8b9d40dece88e0a27f911edd427ebc064 | [
"MIT"
] | null | null | null | tests/test_step.py | arup-group/mc | 50b8faa8b9d40dece88e0a27f911edd427ebc064 | [
"MIT"
] | 12 | 2021-12-14T15:10:43.000Z | 2022-03-31T13:39:25.000Z | tests/test_step.py | arup-group/mc | 50b8faa8b9d40dece88e0a27f911edd427ebc064 | [
"MIT"
] | null | null | null | from pathlib import Path
import pytest
from copy import deepcopy
import os
from mc.base import BaseConfig
from mc import step
| 39.43 | 103 | 0.672331 |
0840e1f2cca91c8f40fea1035d91f9ed0ea2c8f1 | 15,552 | py | Python | lambda_functions.py | intirix/serverless-secrets-manager | 2c89b2c497f7078c38885125dfa79db944a214db | [
"Apache-2.0"
] | 2 | 2018-05-23T06:04:13.000Z | 2020-11-04T23:16:09.000Z | lambda_functions.py | intirix/serverless-secrets-manager | 2c89b2c497f7078c38885125dfa79db944a214db | [
"Apache-2.0"
] | null | null | null | lambda_functions.py | intirix/serverless-secrets-manager | 2c89b2c497f7078c38885125dfa79db944a214db | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import system
import db
import client
import server
import logging
import json
import base64
import os
from aws_xray_sdk.core import patch_all
if "AWS_REGION" in os.environ:
patch_all()
_singleton = None
FORMAT = "%(asctime)-15s %(message)s"
logging.basicConfig(format=FORMAT)
| 29.622857 | 84 | 0.600244 |
08418a8370fcf775a2fd7e29466ecc715efe0e4f | 2,575 | py | Python | tests/utils_test.py | asrashley/dash-live | 1ffbc57896e4e46855a42af6ef79a1865ebfce55 | [
"Apache-2.0"
] | 2 | 2019-11-02T06:26:29.000Z | 2020-05-15T16:54:20.000Z | tests/utils_test.py | asrashley/dash-live | 1ffbc57896e4e46855a42af6ef79a1865ebfce55 | [
"Apache-2.0"
] | 1 | 2020-01-20T17:20:54.000Z | 2020-01-21T08:38:30.000Z | tests/utils_test.py | asrashley/dash-live | 1ffbc57896e4e46855a42af6ef79a1865ebfce55 | [
"Apache-2.0"
] | null | null | null |
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import datetime
import os
import sys
import unittest
_src = os.path.join(os.path.dirname(__file__),"..", "src")
if not _src in sys.path:
sys.path.append(_src)
import utils
if __name__ == "__main__":
unittest.main()
| 35.763889 | 97 | 0.597282 |
08432f03ae4911f91726c50919d96811876b71c7 | 9,364 | py | Python | midv500/download_dataset.py | fcakyon/midv500-to-coco | 2f1cd74e0bb8da2301a96e3fb0cd9f17005ed08c | [
"MIT"
] | 39 | 2020-05-15T17:34:32.000Z | 2022-03-25T08:22:47.000Z | midv500/download_dataset.py | fcakyon/midv500-to-coco | 2f1cd74e0bb8da2301a96e3fb0cd9f17005ed08c | [
"MIT"
] | 1 | 2020-08-04T09:04:06.000Z | 2020-08-19T12:50:15.000Z | midv500/download_dataset.py | fcakyon/midv500-to-coco | 2f1cd74e0bb8da2301a96e3fb0cd9f17005ed08c | [
"MIT"
] | 6 | 2020-04-23T19:40:16.000Z | 2021-12-19T17:52:42.000Z | import os
import argparse
from midv500.utils import download, unzip
midv500_links = [
"ftp://smartengines.com/midv-500/dataset/01_alb_id.zip",
"ftp://smartengines.com/midv-500/dataset/02_aut_drvlic_new.zip",
"ftp://smartengines.com/midv-500/dataset/03_aut_id_old.zip",
"ftp://smartengines.com/midv-500/dataset/04_aut_id.zip",
"ftp://smartengines.com/midv-500/dataset/05_aze_passport.zip",
"ftp://smartengines.com/midv-500/dataset/06_bra_passport.zip",
"ftp://smartengines.com/midv-500/dataset/07_chl_id.zip",
"ftp://smartengines.com/midv-500/dataset/08_chn_homereturn.zip",
"ftp://smartengines.com/midv-500/dataset/09_chn_id.zip",
"ftp://smartengines.com/midv-500/dataset/10_cze_id.zip",
"ftp://smartengines.com/midv-500/dataset/11_cze_passport.zip",
"ftp://smartengines.com/midv-500/dataset/12_deu_drvlic_new.zip",
"ftp://smartengines.com/midv-500/dataset/13_deu_drvlic_old.zip",
"ftp://smartengines.com/midv-500/dataset/14_deu_id_new.zip",
"ftp://smartengines.com/midv-500/dataset/15_deu_id_old.zip",
"ftp://smartengines.com/midv-500/dataset/16_deu_passport_new.zip",
"ftp://smartengines.com/midv-500/dataset/17_deu_passport_old.zip",
"ftp://smartengines.com/midv-500/dataset/18_dza_passport.zip",
"ftp://smartengines.com/midv-500/dataset/19_esp_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/20_esp_id_new.zip",
"ftp://smartengines.com/midv-500/dataset/21_esp_id_old.zip",
"ftp://smartengines.com/midv-500/dataset/22_est_id.zip",
"ftp://smartengines.com/midv-500/dataset/23_fin_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/24_fin_id.zip",
"ftp://smartengines.com/midv-500/dataset/25_grc_passport.zip",
"ftp://smartengines.com/midv-500/dataset/26_hrv_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/27_hrv_passport.zip",
"ftp://smartengines.com/midv-500/dataset/28_hun_passport.zip",
"ftp://smartengines.com/midv-500/dataset/29_irn_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/30_ita_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/31_jpn_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/32_lva_passport.zip",
"ftp://smartengines.com/midv-500/dataset/33_mac_id.zip",
"ftp://smartengines.com/midv-500/dataset/34_mda_passport.zip",
"ftp://smartengines.com/midv-500/dataset/35_nor_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/36_pol_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/37_prt_id.zip",
"ftp://smartengines.com/midv-500/dataset/38_rou_drvlic.zip",
"ftp://smartengines.com/midv-500/dataset/39_rus_internalpassport.zip",
"ftp://smartengines.com/midv-500/dataset/40_srb_id.zip",
"ftp://smartengines.com/midv-500/dataset/41_srb_passport.zip",
"ftp://smartengines.com/midv-500/dataset/42_svk_id.zip",
"ftp://smartengines.com/midv-500/dataset/43_tur_id.zip",
"ftp://smartengines.com/midv-500/dataset/44_ukr_id.zip",
"ftp://smartengines.com/midv-500/dataset/45_ukr_passport.zip",
"ftp://smartengines.com/midv-500/dataset/46_ury_passport.zip",
"ftp://smartengines.com/midv-500/dataset/47_usa_bordercrossing.zip",
"ftp://smartengines.com/midv-500/dataset/48_usa_passportcard.zip",
"ftp://smartengines.com/midv-500/dataset/49_usa_ssn82.zip",
"ftp://smartengines.com/midv-500/dataset/50_xpo_id.zip",
]
midv2019_links = [
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/01_alb_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/02_aut_drvlic_new.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/03_aut_id_old.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/04_aut_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/05_aze_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/06_bra_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/07_chl_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/08_chn_homereturn.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/09_chn_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/10_cze_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/11_cze_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/12_deu_drvlic_new.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/13_deu_drvlic_old.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/14_deu_id_new.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/15_deu_id_old.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/16_deu_passport_new.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/17_deu_passport_old.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/18_dza_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/19_esp_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/20_esp_id_new.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/21_esp_id_old.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/22_est_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/23_fin_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/24_fin_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/25_grc_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/26_hrv_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/27_hrv_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/28_hun_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/29_irn_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/30_ita_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/31_jpn_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/32_lva_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/33_mac_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/34_mda_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/35_nor_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/36_pol_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/37_prt_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/38_rou_drvlic.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/39_rus_internalpassport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/40_srb_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/41_srb_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/42_svk_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/43_tur_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/44_ukr_id.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/45_ukr_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/46_ury_passport.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/47_usa_bordercrossing.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/48_usa_passportcard.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/49_usa_ssn82.zip",
"ftp://smartengines.com/midv-500/extra/midv-2019/dataset/50_xpo_id.zip",
]
def download_dataset(download_dir: str, dataset_name: str = "midv500"):
    """Download the MIDV dataset archives and unzip them.

    Args:
        download_dir: Directory the zip files are downloaded into and
            extracted under (one subdirectory per dataset name).
        dataset_name: Which dataset to fetch:
            "midv500": https://doi.org/10.18287/2412-6179-2019-43-5-818-824
            "midv2019": https://doi.org/10.1117/12.2558438
            "all": midv500 + midv2019

    Raises:
        ValueError: If ``dataset_name`` is not one of the supported names.
    """
    if dataset_name == "midv500":
        links_set = {
            "midv500": midv500_links,
        }
    elif dataset_name == "midv2019":
        links_set = {
            "midv2019": midv2019_links,
        }
    elif dataset_name == "all":
        links_set = {
            "midv500": midv500_links,
            "midv2019": midv2019_links,
        }
    else:
        # BUG FIX: the original built this Exception without raising it, so
        # execution fell through and crashed below with an UnboundLocalError
        # on links_set. Fail fast with a ValueError instead.
        raise ValueError(
            'Invalid dataset_name, try one of "midv500", "midv2019" or "all".'
        )
    for dataset, links in links_set.items():
        dst = os.path.join(download_dir, dataset)
        for link in links:
            print("--------------------------------------------------------------")
            # download zip file
            link = link.replace("\\", "/")  # for windows
            filename = link.split("/")[-1]
            print("\nDownloading:", filename)
            download(link, dst)
            print("Downloaded:", filename)
            # unzip zip file
            print("Unzipping:", filename)
            zip_path = os.path.join(dst, filename)
            unzip(zip_path, dst)
            print("Unzipped:", filename.replace(".zip", ""))
            # remove zip file to reclaim disk space
            os.remove(zip_path)
if __name__ == "__main__":
    # construct the argument parser
    ap = argparse.ArgumentParser()
    # BUG FIX: argparse ignores `default=` on a required positional argument;
    # nargs="?" makes the positional optional so the declared default is
    # actually honored when no directory is given on the command line.
    ap.add_argument(
        "download_dir",
        nargs="?",
        default="data/",
        help="Directory for MIDV-500 dataset to be downloaded.",
    )
    args = vars(ap.parse_args())
    # download dataset (midv500 by default)
    download_dataset(args["download_dir"])
| 54.127168 | 90 | 0.705788 |
0845053b64f5370f1498b8e4729e90a827f0c839 | 6,329 | py | Python | erpnext_taxjar/api.py | DigiThinkIT/erpnext_taxjar | 5313dbdd931745e9655d3f5fd53c830abb0d7ee7 | [
"MIT"
] | null | null | null | erpnext_taxjar/api.py | DigiThinkIT/erpnext_taxjar | 5313dbdd931745e9655d3f5fd53c830abb0d7ee7 | [
"MIT"
] | 8 | 2017-07-01T11:13:14.000Z | 2020-11-19T13:26:29.000Z | erpnext_taxjar/api.py | DigiThinkIT/erpnext_taxjar | 5313dbdd931745e9655d3f5fd53c830abb0d7ee7 | [
"MIT"
] | 13 | 2017-06-30T15:47:00.000Z | 2022-02-22T16:24:41.000Z | import traceback
import pycountry
import taxjar
import frappe
from erpnext import get_default_company
from frappe import _
from frappe.contacts.doctype.address.address import get_company_address
TAX_ACCOUNT_HEAD = frappe.db.get_single_value("TaxJar Settings", "tax_account_head")
SHIP_ACCOUNT_HEAD = frappe.db.get_single_value("TaxJar Settings", "shipping_account_head")
| 26.931915 | 133 | 0.746721 |
08453ede8c646dbf40688a3665092cf3d4f4e359 | 3,543 | py | Python | tests/lib_test.py | grundrauschen/center-points | 5a12f68ac012a0a2bf52d8a8381d0272e309ac18 | [
"MIT"
] | null | null | null | tests/lib_test.py | grundrauschen/center-points | 5a12f68ac012a0a2bf52d8a8381d0272e309ac18 | [
"MIT"
] | 2 | 2015-06-03T10:57:13.000Z | 2015-09-15T12:43:22.000Z | tests/lib_test.py | fu-berlin-swp-2014/center-points | 0fa523314a3168d4d229b6f61d0d05d314a8b35a | [
"MIT"
] | null | null | null | import unittest
import numpy as np
import numpy.testing as nptest
import centerpoints.lib as lib
| 36.90625 | 76 | 0.519616 |
084547589496d6e3bddafc72879279f994ed30e1 | 711 | py | Python | genome-experimentation/cleaning-genome-data.py | shivamsyal/summer21 | 68cdcae1524e720066e57baa190f15477b69515a | [
"MIT"
] | null | null | null | genome-experimentation/cleaning-genome-data.py | shivamsyal/summer21 | 68cdcae1524e720066e57baa190f15477b69515a | [
"MIT"
] | null | null | null | genome-experimentation/cleaning-genome-data.py | shivamsyal/summer21 | 68cdcae1524e720066e57baa190f15477b69515a | [
"MIT"
] | 2 | 2022-01-10T18:16:18.000Z | 2022-03-20T01:17:28.000Z | # test comment
import os
filename = input("File to format: ")
os.system("gunzip "+filename)
n = int(input("What number genome is this? "))
os.system("mv "+filename[:-3]+" genome"+str(n)+".fna")
original = "genome"+str(n)+".fna"
copy = "genome"+str(n)+"_copy.fna"
filtered = "genome"+str(n)+"_filtered.fna"
rem = ['>']
with open(original) as old, open(copy,'w') as new:
for line in old:
if not any(bad in line for bad in rem):
new.write(line)
with open(copy) as f, open(filtered,'a') as f2:
f2.write("".join(line.strip() for line in f))
with open(filtered, 'r+') as inp:
y = inp.read().upper()
inp.truncate(0)
with open(filtered, 'a') as out:
out.write(y)
os.remove(copy)
| 30.913043 | 54 | 0.624473 |
084592c05031adcf4e22889393a72a2880d58eb8 | 758 | py | Python | villas/controller/components/managers/generic.py | VILLASframework/VILLAScontroller | e672439797f209afdd5bc62078f7d49c60269aa4 | [
"Apache-2.0"
] | null | null | null | villas/controller/components/managers/generic.py | VILLASframework/VILLAScontroller | e672439797f209afdd5bc62078f7d49c60269aa4 | [
"Apache-2.0"
] | null | null | null | villas/controller/components/managers/generic.py | VILLASframework/VILLAScontroller | e672439797f209afdd5bc62078f7d49c60269aa4 | [
"Apache-2.0"
] | null | null | null | from villas.controller.components.manager import Manager
from villas.controller.component import Component
| 28.074074 | 76 | 0.62533 |
0845d2588d5c55abf24f9ab405009bd284d758d8 | 833 | py | Python | tests/test_composition.py | gregorynicholas/proto-pigeon | 65a5d961e7a8506f3a968b21aaf68f625fd13190 | [
"Apache-2.0"
] | null | null | null | tests/test_composition.py | gregorynicholas/proto-pigeon | 65a5d961e7a8506f3a968b21aaf68f625fd13190 | [
"Apache-2.0"
] | null | null | null | tests/test_composition.py | gregorynicholas/proto-pigeon | 65a5d961e7a8506f3a968b21aaf68f625fd13190 | [
"Apache-2.0"
] | null | null | null | from protorpc.messages import Message, IntegerField, StringField
import protopigeon
def test():
ComposedMessage = protopigeon.compose(MessageOne, MessageTwo)
assert hasattr(ComposedMessage, 'one')
assert hasattr(ComposedMessage, 'two')
assert hasattr(ComposedMessage, 'three')
assert hasattr(ComposedMessage, 'four')
# Make sure these fields weren't modified
assert MessageOne.one.number == 1
assert MessageOne.two.number == 2
assert MessageTwo.three.number == 1
assert MessageTwo.four.number == 2
instance = ComposedMessage(
one=1,
two=2,
three='three',
four='four')
assert instance
| 23.138889 | 65 | 0.686675 |
0846011f39bb03a7af3bf569426365af42543fe1 | 1,503 | py | Python | udacity-program_self_driving_car_engineer_v2.0/module02-computer vision/exercise02-data acquisiton and visualization/visualization.py | linksdl/futuretec-project-self_driving_cars_projects | 38e8f14543132ec86a8bada8d708eefaef23fee8 | [
"MIT"
] | null | null | null | udacity-program_self_driving_car_engineer_v2.0/module02-computer vision/exercise02-data acquisiton and visualization/visualization.py | linksdl/futuretec-project-self_driving_cars_projects | 38e8f14543132ec86a8bada8d708eefaef23fee8 | [
"MIT"
] | null | null | null | udacity-program_self_driving_car_engineer_v2.0/module02-computer vision/exercise02-data acquisiton and visualization/visualization.py | linksdl/futuretec-project-self_driving_cars_projects | 38e8f14543132ec86a8bada8d708eefaef23fee8 | [
"MIT"
] | null | null | null | """
# !/usr/bin/env python
# -*- coding: utf-8 -*-
@Time : 2022/2/23 19:35
@Author : shengdl999links@gmail.com
@ProjectName : udacity-program_self_driving_car_engineer_v1.0_source.0
@File : visualization.py
"""
import glob
import os.path
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from PIL import Image
from utils import get_data
def viz(ground_truth):
"""
create a grid visualization of images with color coded bboxes
args:
- ground_truth [list[dict]]: ground truth data
"""
# IMPLEMENT THIS FUNCTION
paths = glob.glob('../data/images/*')
gt_dic = {}
# mapping to access data faster
for gt in ground_truth:
gt_dic[gt['filename']] = gt
# color mapping of classes
color_map = {1: [1, 0, 0], 2: [0, 1, 0], 4: [0, 0, 1]}
f, ax = plt.subplots(4, 5, figsize=(20, 10))
for i in range(20):
x = i % 4
y = i % 5
filename = os.path.basename(paths[i])
img = Image.open(paths[i])
ax[x, y].imshow(img)
bboxes = gt_dic[filename]['boxes']
classes = gt_dic[filename]['classes']
for cl, bb in zip(classes, bboxes):
y1, x1, y2, x2 = bb
rec = Rectangle((x1, y1), x2 - x1, y2 - y1, facecolor='none', edgecolor=color_map[cl])
ax[x, y].add_patch(rec)
ax[x, y].axis('off')
plt.tight_layout()
plt.show()
if __name__ == "__main__":
ground_truth, _ = get_data()
viz(ground_truth)
| 25.05 | 98 | 0.594145 |
084746dfc5f458e9131b1743d5567db36da8ab9c | 898 | py | Python | setup.py | georgenicolaou/python-fakeports | 24eecf879e0d2d2a100be06952fb3677019457e2 | [
"MIT"
] | 3 | 2020-02-03T08:25:10.000Z | 2021-09-29T15:59:01.000Z | setup.py | georgenicolaou/python-fakeports | 24eecf879e0d2d2a100be06952fb3677019457e2 | [
"MIT"
] | 2 | 2021-01-18T19:27:44.000Z | 2021-01-18T19:27:44.000Z | setup.py | georgenicolaou/python-fakeports | 24eecf879e0d2d2a100be06952fb3677019457e2 | [
"MIT"
] | null | null | null | from setuptools import setup
long_description = 'TODO'
# with open("README.md", "r") as rfd:
# long_description = rfd.read()
REQUIREMENTS = [r.strip() for r in open("requirements.txt").readlines()]
setup(
name='python-fakeports',
version="0.1",
packages=['python_fakeports'],
url='',
license='GPL',
author='George Nicolaou',
author_email='george@silensec.com',
description='Python clone of portspoof',
long_description=long_description,
install_requires=REQUIREMENTS,
data_files=[
('/etc/fakeports/', ['fakeports.yml.sample']),
('/usr/local/bin/', ['bin/fakeports.tac'])
],
scripts=['bin/fakeportsctl', 'bin/fakeportsd'],
platforms='any',
classifiers = [line.strip() for line in '''\
Development Status :: 4 - Beta
Intended Audience :: System Administrators
Operating System :: POSIX :: Linux
''']
)
| 28.967742 | 72 | 0.644766 |
08483d8175b8bd82d7534d3c981f5d1467884db2 | 195 | py | Python | darzalib/Incoming/__init__.py | swrlly/Midnight | b4375002761a13a09a6c3085e9b34384b28227ba | [
"MIT"
] | 2 | 2021-11-18T13:38:52.000Z | 2021-11-19T04:15:24.000Z | darzalib/Incoming/__init__.py | swrlly/Midnight | b4375002761a13a09a6c3085e9b34384b28227ba | [
"MIT"
] | null | null | null | darzalib/Incoming/__init__.py | swrlly/Midnight | b4375002761a13a09a6c3085e9b34384b28227ba | [
"MIT"
] | null | null | null | from .BiomeDisplay import BiomeDisplay
from .Chats import Chats
from .PlayEffect import PlayEffect
from .Reconnect import Reconnect
from .SwapAck import SwapAck
from .UseItemAck import UseItemAck | 32.5 | 38 | 0.851282 |
08486cbf36ba6ba189128910a8b98a815a664466 | 938 | py | Python | python/17_letter_combinations_of_a_phone_number.py | dchapp/blind75 | aaa409cf2db4ef6d0f86177f4217eceeb391caa8 | [
"MIT"
] | null | null | null | python/17_letter_combinations_of_a_phone_number.py | dchapp/blind75 | aaa409cf2db4ef6d0f86177f4217eceeb391caa8 | [
"MIT"
] | null | null | null | python/17_letter_combinations_of_a_phone_number.py | dchapp/blind75 | aaa409cf2db4ef6d0f86177f4217eceeb391caa8 | [
"MIT"
] | null | null | null | num_to_letters = {
'2': ['a', 'b', 'c'],
'3': ['d', 'e', 'f'],
'4': ['g', 'h', 'i'],
'5': ['j', 'k', 'l'],
'6': ['m', 'n', 'o'],
'7': ['p', 'q', 'r', 's'],
'8': ['t', 'u', 'v'],
'9': ['w', 'x', 'y', 'z'],
}
| 28.424242 | 65 | 0.410448 |
084d1fd01b6f648a85848dd0310b96b0d8966a0e | 651 | py | Python | yotta/options/registry.py | microbit-foundation/yotta | 82d854b43d391abb5a006b05e7beffe7d0d6ffbf | [
"Apache-2.0"
] | 176 | 2015-01-02T07:31:59.000Z | 2022-03-21T12:40:02.000Z | yotta/options/registry.py | microbit-foundation/yotta | 82d854b43d391abb5a006b05e7beffe7d0d6ffbf | [
"Apache-2.0"
] | 549 | 2015-01-05T16:19:54.000Z | 2021-01-15T13:46:42.000Z | yotta/options/registry.py | microbit-foundation/yotta | 82d854b43d391abb5a006b05e7beffe7d0d6ffbf | [
"Apache-2.0"
] | 84 | 2015-01-10T21:01:00.000Z | 2022-03-24T16:04:42.000Z | # Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library options
from argparse import Action, SUPPRESS
| 27.125 | 70 | 0.680492 |
084edafd90972abf12ce9cf828ac494b0afdd467 | 4,453 | py | Python | src/pybraingym/environment.py | anetczuk/pybraingym | 4f930021d7802e88c75a1a0aed135dd4de66cc1b | [
"MIT"
] | null | null | null | src/pybraingym/environment.py | anetczuk/pybraingym | 4f930021d7802e88c75a1a0aed135dd4de66cc1b | [
"MIT"
] | null | null | null | src/pybraingym/environment.py | anetczuk/pybraingym | 4f930021d7802e88c75a1a0aed135dd4de66cc1b | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2019 Arkadiusz Netczuk <dev.arnet@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from pybrain.rl.environments.environment import Environment
from gym.spaces.discrete import Discrete
def action(self, actionValue):
"""Transform action value received from PyBrain and pass result to OpenAi Gym."""
return actionValue
def reward(self, rewardValue):
"""Transform reward value received from OpenAi Gym and pass result to PyBrain."""
return rewardValue
| 32.50365 | 104 | 0.651246 |
084eddbd29309d0a8c29e8b0baeae41ed4f83c9f | 7,420 | py | Python | logicscen.py | exposit/pythia-oracle | 60e4e806c9ed1627f2649822ab1901d28933daac | [
"MIT"
] | 32 | 2016-08-27T01:31:42.000Z | 2022-03-21T08:59:28.000Z | logicscen.py | exposit/pythia-oracle | 60e4e806c9ed1627f2649822ab1901d28933daac | [
"MIT"
] | 3 | 2016-08-27T00:51:47.000Z | 2019-08-26T13:23:04.000Z | logicscen.py | exposit/pythia-oracle | 60e4e806c9ed1627f2649822ab1901d28933daac | [
"MIT"
] | 10 | 2016-08-28T14:14:41.000Z | 2021-03-18T03:24:22.000Z | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#---------------------------------------------------------------------------------------------------
# --> Logic to handle scenarios
#---------------------------------------------------------------------------------------------------
import imports
from imports import *
import config
import logic
from logic import *
| 27.279412 | 119 | 0.508491 |
0850f9781ec228546bf41eccc932a22fd036e4a8 | 7,980 | py | Python | datyy/views/projects.py | VladimirSiv/datyy | 4f3b54557850212ca3ce4c0d16cd56eb9989d7c4 | [
"MIT"
] | null | null | null | datyy/views/projects.py | VladimirSiv/datyy | 4f3b54557850212ca3ce4c0d16cd56eb9989d7c4 | [
"MIT"
] | null | null | null | datyy/views/projects.py | VladimirSiv/datyy | 4f3b54557850212ca3ce4c0d16cd56eb9989d7c4 | [
"MIT"
] | null | null | null | import dash
import dash_html_components as html
import dash_bootstrap_components as dbc
import numpy as np
from server import app
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
from components.cards import simple_info_card
from components.dropdowns import dropdown_single
from components.cards import project_info_card
from components.tables import simple_table
from components.gantts import simple_gantt_graph
from logic.dropdowns import dropdown_single_logic
from logic.tables import generate_project_tasks_data
from logic.pie_charts import sunburst_chart_logic
from logic.gantts import simple_gantt_logic
layout = html.Div(
children=[
html.Div(id="project-temp", style={"display": "none"}),
dbc.Row(
className="main-row",
children=[
dbc.Col(
dropdown_single(
id_="project-select",
placeholder="Select Project",
text="Project:",
),
width=3,
),
],
),
dbc.Row(
className="main-row",
children=[
dbc.Col(
simple_info_card(
id_="project-card-planning",
title="Planning",
)
),
dbc.Col(
simple_info_card(
id_="project-card-design",
title="Design",
)
),
dbc.Col(
simple_info_card(
id_="project-card-development",
title="Development",
)
),
dbc.Col(
simple_info_card(
id_="project-card-testing",
title="Testing",
)
),
dbc.Col(
simple_info_card(
id_="project-card-cost",
title="Cost",
)
),
dbc.Col(
simple_info_card(
id_="project-card-duration",
title="Duration",
)
),
],
),
dbc.Row(
className="main-row",
children=[
dbc.Col(
project_info_card(
id_="budget-graph",
title="Budget spending",
subcomponents={
"project-budget": "Budget",
"project-remaining": "Remaining",
"project-currently": "Currently",
},
),
width=6,
),
dbc.Col(
simple_table(
id_="project-tasks-table",
title="Overdue tasks",
columns=[
"Overdue (days)",
"Task",
"Deadline",
"Employee",
],
),
width=6,
),
],
),
html.Div(
className="main-row", children=[html.H4("Milestones", className="title-bold")]
),
dbc.Row(
className="main-row",
children=[dbc.Col(simple_gantt_graph(id_="project-gantt-graph"))],
),
]
)
| 28.098592 | 90 | 0.52193 |
08540bf2ac8cadc1cf3900bd14a8f79f2ba8831e | 146 | py | Python | week-02/appendA.py | norbertbodo91/pythonExercises | 9cd773c5d6ce3280d19a84ef12b8fd478ff09613 | [
"MIT"
] | null | null | null | week-02/appendA.py | norbertbodo91/pythonExercises | 9cd773c5d6ce3280d19a84ef12b8fd478ff09613 | [
"MIT"
] | null | null | null | week-02/appendA.py | norbertbodo91/pythonExercises | 9cd773c5d6ce3280d19a84ef12b8fd478ff09613 | [
"MIT"
] | null | null | null |
print(appendA(toAppend = raw_input("Enter a word to add an A letter: ")))
| 24.333333 | 73 | 0.684932 |
085550c02672da4291f033dfdf10337c089c2aa8 | 16,119 | py | Python | multiacctcf.py | DonMills/multiacct-CF-orchestrate | 4acce3c984c1801ff66cf9d210e3a0d1a6f9246b | [
"MIT"
] | 11 | 2017-07-19T07:05:44.000Z | 2022-02-07T19:35:51.000Z | multiacctcf.py | DonMills/multiacct-CF-orchestrate | 4acce3c984c1801ff66cf9d210e3a0d1a6f9246b | [
"MIT"
] | null | null | null | multiacctcf.py | DonMills/multiacct-CF-orchestrate | 4acce3c984c1801ff66cf9d210e3a0d1a6f9246b | [
"MIT"
] | 2 | 2017-07-19T15:01:52.000Z | 2022-02-07T19:35:53.000Z | #!/usr/bin/python
from __future__ import print_function
import threading
import boto3
import botocore
import argparse
from time import ctime
###############
# Some Global Vars
##############
lock = threading.Lock()
awsaccts = [{'acct': 'acct1ID',
'name': 'master',
'cffile': 'location of cloudformation file in S3'},
{'acct': 'acct2ID',
'name': 'dev',
'cffile': 'location of cloudformation file in S3'},
{'acct': 'acct3ID',
'name': 'staging',
'cffile': 'location of cloudformation file in S3'},
{'acct': 'acct4ID',
'name': 'test',
'cffile': 'location of cloudformation file in S3'},
{'acct': 'acct5ID',
'name': 'QA',
'cffile': 'location of cloudformation file in S3'}]
###################################
# This results dict is prepopulated with the info for the master vpc in a region. It will be overwritten
# if the master cloudform is run
###################################
results = {
'master': {
'CIDRblock': '172.0.1.0/22',
'RTBint': [
'rtb-xxxxxxxx',
'rtb-xxxxxxxx'],
'VPCID': 'vpc-xxxxxxxx'}}
threads = []
#######################
# The function that does CloudFormation and peering requests
#######################
if __name__ == '__main__':
main()
| 39.70197 | 179 | 0.470749 |
085769a397608c592ac48390d3b4d6b67aae08eb | 882 | py | Python | NIM/tests/woa_test.py | buctlab/source-seeking-multi-robot-team-simulator | a68c214b9bd19006a94c0adc832681bbaf0d6dc8 | [
"Apache-2.0"
] | null | null | null | NIM/tests/woa_test.py | buctlab/source-seeking-multi-robot-team-simulator | a68c214b9bd19006a94c0adc832681bbaf0d6dc8 | [
"Apache-2.0"
] | null | null | null | NIM/tests/woa_test.py | buctlab/source-seeking-multi-robot-team-simulator | a68c214b9bd19006a94c0adc832681bbaf0d6dc8 | [
"Apache-2.0"
] | null | null | null | import os
from Config import Config
from NIM.algorithms import WhaleOptimizationAlgorithm
from NIM.algorithms.algorithm import logger
if __name__ == '__main__':
with open(Config.default_saved_scene_path, 'r') as f:
data = f.read()
m2d = eval(data)
seed = 5
woa = WhaleOptimizationAlgorithm(m2d, Config.rasterized_cell_size, func=Config.func, iterations=Config.iterations,
debug=True, population=Config.number_of_robots, robot_size=Config.size, seed=seed,
k=Config.leakage_sources)
best_sol, best_val = woa.run()
logger.info("best sol:{sol}, best val:{val}".format(sol=best_sol, val=best_val))
func_name = type(woa.func).__name__
woa.iter_swarm_pos.to_csv(
os.path.join(Config.project_root, "data/csv_file/woa_MultiSourceFunction_" + str(seed) + ".csv"))
| 36.75 | 119 | 0.675737 |
085893c679735b22d323d01a1e71583ba759cc3a | 6,242 | py | Python | src/COVIDZejunDatagraphs.py | luisflores0330/ista131final | 168ac6afe666e945ae717387b50420804b33c4f3 | [
"Apache-2.0"
] | null | null | null | src/COVIDZejunDatagraphs.py | luisflores0330/ista131final | 168ac6afe666e945ae717387b50420804b33c4f3 | [
"Apache-2.0"
] | null | null | null | src/COVIDZejunDatagraphs.py | luisflores0330/ista131final | 168ac6afe666e945ae717387b50420804b33c4f3 | [
"Apache-2.0"
] | 4 | 2021-12-07T21:44:31.000Z | 2021-12-07T23:20:04.000Z | '''
File: COVIDZejunDatagraphs.py
Author: Zejun Li
Purpose: This file contains 12 different functions to make 5 different graphs about the COVID 19 in Idaho
'''
import pandas as pd, numpy as np, matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime
import datetime as dt
def get_df():
'''
This function is to get the dataframe from the csv file : data_table_for_daily_death_trends__idaho.csv
'''
fname = "data_table_for_daily_death_trends__idaho.csv"
df = pd.read_csv(fname,sep=',', skiprows = 2, engine='python')
del df["State"]
df["Dates"] = np.nan
df['Dates'] = df['Date'].apply(date_convert)
del df["Date"]
return df
def get_date_lst():
'''This function is to get all of the dates from the Dates column
'''
df = get_df()
lst_dates = []
for i in df['Dates']:
lst_dates.append(i)
return lst_dates
def fig1():
'''This function is to make a line graph with x axis of Dates and y axis of Current Hospitalized COVID-19 Patients.
'''
df = get_df()
lst_dates = get_date_lst()
x = [dt.datetime.strptime(d,'%m/%d/%Y').date() for d in lst_dates]
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%m/%d/%Y'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator(interval=50))
plt.plot(x,df['Current Hospitalized COVID-19 Patients'])
plt.gcf().autofmt_xdate()
plt.xlabel("Dates")
plt.ylabel("Current Hospitalized COVID-19 Patients")
plt.suptitle('Figure 1', fontsize=16)
def fig2():
'''This function is to make a bar chart with x axis of Dates and y axis of New Deaths
'''
df = get_df()
lst_dates = get_date_lst()
plt.figure(figsize=(10,10))
plt.style.use('ggplot')
lst_dates = []
for i in df['Dates']:
lst_dates.append(i)
x = [dt.datetime.strptime(d,'%m/%d/%Y').date() for d in lst_dates]
lst = []
for i in df['New Deaths']:
lst.append(i)
x_pos = [i for i, _ in enumerate(x)]
plt.bar(x,lst,width=0.8, color='darkviolet')
plt.xlabel("Dates")
plt.ylabel("New Deaths")
plt.suptitle('Figure 2', fontsize=16)
def fig3():
'''This function is to make a scatter plot with x axis of Dates and y axis of 7-Day Moving Avg
'''
df = get_df()
plt.figure(figsize=(16,10), dpi= 80)
lst_dates = get_date_lst()
lst = []
for i in df["7-Day Moving Avg"]:
lst.append(i)
int_lst = []
for i in range(len(lst_dates)):
int_lst.append(i)
x = np.array(lst_dates)
y = np.array(lst)
x1 = np.array(int_lst)
m, b = np.polyfit(x1, y, 1)
plt.plot(x, m*x1 + b)
plt.scatter(x, y)
plt.gca().xaxis.set_major_locator(mdates.DayLocator(interval=50))
plt.xlabel("Dates")
plt.ylabel("7-Day Moving Avg")
plt.gca().invert_xaxis()
plt.suptitle('Figure 3', fontsize=16)
main()
def csv(file):
'''
This function is to get two dataframes from the csv file; df: data_table_for_daily_case_trends__idaho1.csv; df2:data_table_for_daily_death_trends__idaho2.csv
'''
df = pd.read_csv(file, sep = ",", skiprows = 2)
df2 = pd.read_csv("data_table_for_daily_death_trends__idaho2.csv", sep = "," , skiprows = 2)
df["New Deaths"] = df2["New Deaths"]
df["Doses Per Day"] = 0
df["Dates"] = df["Date"].replace({"Jan":"01", "Feb":"02","Mar":"03","Apr":"04","May":"05","Jun":"06","Jul":"07","Aug":"08","Sep":"09","Oct":"10","Nov":"11","Dec":"12"}, regex = True)
df["Total Doses Administered"] = df["Total Doses Administered"].fillna(0)
for i in range(1, len(df["Total Doses Administered"])-1):
a = pd.to_numeric(df["Total Doses Administered"])
df.loc[i-1,"Doses Per Day"] = abs((int(a.iloc[i-1]) - int(a.iloc[i])))
a.append(df["Doses Per Day"])
df.drop(labels = [0], axis = 0)
df.drop([0, 1, 2], axis = 0,inplace = True)
del df["7-Day Moving Avg"]
del df["State"]
return df
def clean_dose():
'''This function is to delete the dates that don't have dose
'''
df = csv("data_table_for_daily_case_trends__idaho1.csv")
for i in range(626,670):
df = df.drop(index=i)
return df
def fig4():
'''This function is to make a line graph with x axis of Dates and y axis of New cases
'''
df = csv("data_table_for_daily_case_trends__idaho1.csv")
x = [dt.datetime.strptime(d,'%m %d %Y').date() for d in df["Dates"]]
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%m %d %Y'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator(interval=50))
plt.plot(x,df['New Cases'])
plt.gcf().autofmt_xdate()
plt.xlabel("Dates")
plt.ylabel("New Cases")
plt.suptitle('Figure 4', fontsize=16)
'''
def fig5():
df = csv("data_table_for_daily_case_trends__idaho1.csv")
plt.figure(figsize=(10,10))
plt.style.use('ggplot')
lst_dates = []
for i in df['Dates']:
lst_dates.append(i)
x = [dt.datetime.strptime(d,'%m %d %Y').date() for d in df["Dates"]]
lst = []
for i in df['New Deaths']:
lst.append(i)
x_pos = [i for i, _ in enumerate(x)]
plt.bar(x,lst,width=0.8, color='black')
plt.xlabel("Dates")
plt.ylabel("New Deaths")
plt.suptitle('Figure 5', fontsize=16)
'''
def fig5():
'''This function is to make a bar chart with x axis of Dates and y axis of Doses Per Day
'''
df = clean_dose()
plt.figure(figsize=(16,10), dpi= 80)
lst = []
for i in df["Doses Per Day"]:
lst.append(i)
x = np.array(df["Dates"])
y = np.array(lst)
plt.gca().xaxis.set_major_locator(mdates.DayLocator(interval=50))
plt.bar(x,lst,width=0.8, color='navy')
plt.xlabel("Dates")
plt.ylabel("Doses Per Day")
plt.gca().invert_xaxis()
plt.suptitle('Figure 5', fontsize=16)
main2()
| 33.026455 | 187 | 0.603172 |
0858b5bc59305248e9f97a28c217e52f4157d9b4 | 1,118 | py | Python | tests/test_pipeline_disk_deduplication.py | kingking888/skyscraper | d710202f9581c3791d2cf7ee3ae33e950e46c0b7 | [
"MIT"
] | 1 | 2021-03-21T07:25:43.000Z | 2021-03-21T07:25:43.000Z | tests/test_pipeline_disk_deduplication.py | kingking888/skyscraper | d710202f9581c3791d2cf7ee3ae33e950e46c0b7 | [
"MIT"
] | null | null | null | tests/test_pipeline_disk_deduplication.py | kingking888/skyscraper | d710202f9581c3791d2cf7ee3ae33e950e46c0b7 | [
"MIT"
] | 1 | 2021-04-24T11:38:18.000Z | 2021-04-24T11:38:18.000Z | import pytest
import json
import datetime
from scrapy.spiders import Spider
import scrapy.exceptions
from skyscraper.items import BasicItem
from scrapy.exceptions import DropItem
from skyscraper.pipelines.filesystem import DiskDeduplicationPipeline
| 24.304348 | 74 | 0.686047 |
08597f4873dfad388e0eb75f921c519b7373d12a | 180 | py | Python | windscribe/__init__.py | Dayzpd/Python-Windscribe | eaaca7b39286434ec8588c967076f0b5b9961d91 | [
"MIT"
] | 9 | 2020-09-17T19:42:18.000Z | 2022-01-04T07:14:37.000Z | windscribe/__init__.py | Dayzpd/Python-Windscribe | eaaca7b39286434ec8588c967076f0b5b9961d91 | [
"MIT"
] | 4 | 2020-10-28T16:22:54.000Z | 2022-01-04T07:13:18.000Z | windscribe/__init__.py | Dayzpd/Python-Windscribe | eaaca7b39286434ec8588c967076f0b5b9961d91 | [
"MIT"
] | 4 | 2020-12-11T11:13:27.000Z | 2022-01-16T02:40:55.000Z | __all__ = [
'account',
'connect',
'locations',
'login',
'logout',
]
from .windscribe import (
account,
connect,
locations,
login,
logout,
) | 12 | 25 | 0.527778 |
085a588c3443a2133c8229f5612a92a5ee522cad | 335 | py | Python | src/videos/migrations/0009_rename_updated_timestamp_video_updated.py | imsubhamsingh/vibeon | 5ea67bb8dae0a0c28d36f81374eb4f046d842cf5 | [
"Apache-2.0"
] | null | null | null | src/videos/migrations/0009_rename_updated_timestamp_video_updated.py | imsubhamsingh/vibeon | 5ea67bb8dae0a0c28d36f81374eb4f046d842cf5 | [
"Apache-2.0"
] | 2 | 2021-07-19T18:41:46.000Z | 2022-02-10T11:43:07.000Z | src/videos/migrations/0009_rename_updated_timestamp_video_updated.py | imsubhamsingh/vibeon | 5ea67bb8dae0a0c28d36f81374eb4f046d842cf5 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.2 on 2021-04-20 19:34
from django.db import migrations
| 22.333333 | 80 | 0.674627 |
085b597e5e9aaf7c138a4db4c8f8739331aa2a66 | 2,342 | py | Python | SVM/SVM_Regression/Sklearn_SVM_Regression.py | Jojoxiao/Machine-Learning-for-Beginner-by-Python3 | 71b91c9cba5803bd78d4d31be6dabb1d3989e968 | [
"MIT"
] | 397 | 2018-05-28T02:07:32.000Z | 2022-03-30T09:53:37.000Z | SVM/SVM_Regression/Sklearn_SVM_Regression.py | 976634681/Machine-Learning-for-Beginner-by-Python3 | d9effcbb1b390dc608a0f4c0a28f0ad03892047a | [
"MIT"
] | 4 | 2019-01-14T16:41:02.000Z | 2021-03-11T13:23:06.000Z | SVM/SVM_Regression/Sklearn_SVM_Regression.py | 976634681/Machine-Learning-for-Beginner-by-Python3 | d9effcbb1b390dc608a0f4c0a28f0ad03892047a | [
"MIT"
] | 235 | 2018-06-28T05:31:40.000Z | 2022-03-11T03:20:07.000Z | # -*- codingutf-8 -*-
# &Author AnFany
# Sklearn
"""
"""
# PM2.5
import SVM_Regression_Data as rdata
#
from sklearn import svm
import numpy as np
import matplotlib.pyplot as plt
from pylab import mpl
mpl.rcParams['font.sans-serif'] = ['FangSong'] #
mpl.rcParams['axes.unicode_minus'] = False #
"""
"""
#
#
'''
linear, poly, rbf, sigmoid
'''
#
#
''''''
if __name__ == "__main__":
datasvr = rdata.model_data
realtr, outtri, realpre, poupre = result(datasvr, he='rbf')
huitu(realtr, outtri, c=['b', 'k'], sign='', cudu=1.5)
huitu(realpre, poupre, c=['b', 'k'], sign='', cudu=1.5)
| 22.519231 | 101 | 0.557643 |
085b8a0758f970cf513eb9555d20e921de2dbc2f | 1,655 | py | Python | tests/test_history.py | dfroger/conda | c0f99ff46b217d081501e66f4dcd7bcdb5d9c6aa | [
"BSD-3-Clause"
] | null | null | null | tests/test_history.py | dfroger/conda | c0f99ff46b217d081501e66f4dcd7bcdb5d9c6aa | [
"BSD-3-Clause"
] | null | null | null | tests/test_history.py | dfroger/conda | c0f99ff46b217d081501e66f4dcd7bcdb5d9c6aa | [
"BSD-3-Clause"
] | null | null | null | from os.path import dirname
import unittest
from .decorators import skip_if_no_mock
from .helpers import mock
from conda import history
| 31.826923 | 73 | 0.590937 |
085dd2a204aa1776b398919049eef21372b1d7e4 | 107 | py | Python | src/zojax/catalog/tests/__init__.py | Zojax/zojax.catalog | 1be9ef2cd4516d6c1dcfe7da52c4d438852e7ea6 | [
"ZPL-2.1"
] | null | null | null | src/zojax/catalog/tests/__init__.py | Zojax/zojax.catalog | 1be9ef2cd4516d6c1dcfe7da52c4d438852e7ea6 | [
"ZPL-2.1"
] | null | null | null | src/zojax/catalog/tests/__init__.py | Zojax/zojax.catalog | 1be9ef2cd4516d6c1dcfe7da52c4d438852e7ea6 | [
"ZPL-2.1"
] | null | null | null | # This file is necessary to make this directory a package.
from zojax.catalog.catalog import queryCatalog
| 26.75 | 58 | 0.813084 |
085e0152d8a979274c20816965dae9f9c36f8c65 | 6,066 | py | Python | src/bpp/views/raporty/ranking_autorow.py | iplweb/django-bpp | 85f183a99d8d5027ae4772efac1e4a9f21675849 | [
"BSD-3-Clause"
] | 1 | 2017-04-27T19:50:02.000Z | 2017-04-27T19:50:02.000Z | src/bpp/views/raporty/ranking_autorow.py | mpasternak/django-bpp | 434338821d5ad1aaee598f6327151aba0af66f5e | [
"BSD-3-Clause"
] | 41 | 2019-11-07T00:07:02.000Z | 2022-02-27T22:09:39.000Z | src/bpp/views/raporty/ranking_autorow.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | null | null | null | # -*- encoding: utf-8 -*-
import itertools
try:
from django.core.urlresolvers import reverse
except ImportError:
from django.urls import reverse
from django.db.models.aggregates import Sum
from django.template.defaultfilters import safe
from django.utils.functional import cached_property
from django_tables2 import Column
from django_tables2.export.views import ExportMixin
from django_tables2.tables import Table
from django_tables2.views import SingleTableView
from bpp.models import Autor, Sumy, OpcjaWyswietlaniaField, Uczelnia
from bpp.models.struktura import Wydzial
def get_dostepne_wydzialy(self):
return Wydzial.objects.filter(zezwalaj_na_ranking_autorow=True)
def get_wydzialy(self):
base_query = self.get_dostepne_wydzialy()
wydzialy = self.request.GET.getlist("wydzialy[]")
if wydzialy:
try:
wydzialy = base_query.filter(pk__in=[int(x) for x in wydzialy])
return wydzialy
except (TypeError, ValueError):
pass
return base_query
def get_context_data(self, **kwargs):
context = super(SingleTableView, self).get_context_data(**kwargs)
context["od_roku"] = self.kwargs["od_roku"]
context["do_roku"] = self.kwargs["do_roku"]
jeden_rok = False
if self.kwargs["od_roku"] == self.kwargs["do_roku"]:
context["rok"] = self.kwargs["od_roku"]
jeden_rok = True
wydzialy = self.get_wydzialy()
context["wydzialy"] = wydzialy
if jeden_rok:
context["table_title"] = "Ranking autorw za rok %s" % context["rok"]
else:
context["table_title"] = "Ranking autorw za lata %s - %s" % (
context["od_roku"],
context["do_roku"],
)
context["tab_subtitle"] = ""
if len(wydzialy) != len(self.get_dostepne_wydzialy()):
context["table_subtitle"] = ", ".join([x.nazwa for x in wydzialy])
return context
def get_table_kwargs(self):
uczelnia = Uczelnia.objects.all().first()
pokazuj = uczelnia.pokazuj_liczbe_cytowan_w_rankingu
if pokazuj == OpcjaWyswietlaniaField.POKAZUJ_NIGDY or (
pokazuj == OpcjaWyswietlaniaField.POKAZUJ_ZALOGOWANYM
and self.request.user.is_anonymous
):
return {"exclude": ("liczba_cytowan_sum",)}
return {}
| 32.612903 | 88 | 0.616716 |
f22aabe1afa4a1593594ef47c8110872cb757c3c | 16,701 | py | Python | client-lib/pypi/nsrr/nsrr.py | nsrr/nsrr-cloud | a1e33bc3ba3220600e8b1973882d2ed76a7277c6 | [
"MIT"
] | null | null | null | client-lib/pypi/nsrr/nsrr.py | nsrr/nsrr-cloud | a1e33bc3ba3220600e8b1973882d2ed76a7277c6 | [
"MIT"
] | null | null | null | client-lib/pypi/nsrr/nsrr.py | nsrr/nsrr-cloud | a1e33bc3ba3220600e8b1973882d2ed76a7277c6 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import requests
from requests.structures import CaseInsensitiveDict
import json
import getpass
from pathlib import Path
import hashlib
import pandas as pd
import gzip
from multiprocessing import Process
# Global variables
#API_SERVER='https://dev-cloud.sleepdata.org/api/v1'
API_SERVER='https://cloud.sleepdata.org/api/v1'
#API_SERVER='http://localhost:9002/api/v1'
procs=[]
all_decompress_edfz=[]
| 42.496183 | 163 | 0.598946 |
f22b087ab319568e891a7406ef151ad2f4d6b818 | 509 | py | Python | assignment2.py | talsperre/random-walk | 5c810f571c9de28926850e1ad70ff4c29df9c0f4 | [
"MIT"
] | null | null | null | assignment2.py | talsperre/random-walk | 5c810f571c9de28926850e1ad70ff4c29df9c0f4 | [
"MIT"
] | null | null | null | assignment2.py | talsperre/random-walk | 5c810f571c9de28926850e1ad70ff4c29df9c0f4 | [
"MIT"
] | null | null | null | import numpy as np
N = 100
R = 10000
R_range = range(R)
size = (N, 3)
C = np.zeros((N, 3))
k = 1
print ("100")
print ("STEP: ", k)
for i in range(N):
print ("He ", C[i, 0], " ", C[i, 1], " ", C[i, 2])
k += 1
for j in range(R):
A = np.random.uniform(-1, 1, size)
B = np.sum(np.multiply(A, A), axis=1)
B = np.sqrt(B)
B = B.reshape(N, 1)
Norm_A = A / B
C += Norm_A
if j % 10 == 0:
print ("100")
print ("STEP: ", k)
for i in range(N):
print ("He ", C[i, 0], " ", C[i, 1], " ", C[i, 2])
k += 1 | 18.851852 | 53 | 0.489194 |
f22fac0a3ced91e4e4e5768a9d363783d0f24bd3 | 1,462 | py | Python | parallel/images_common.py | minrk/ipython-cse17 | 16a9059c7054a8bd4977a3cb8b09c100ea779069 | [
"BSD-3-Clause"
] | 3 | 2017-03-02T07:11:37.000Z | 2017-03-03T06:13:32.000Z | parallel/images_common.py | minrk/ipython-cse17 | 16a9059c7054a8bd4977a3cb8b09c100ea779069 | [
"BSD-3-Clause"
] | null | null | null | parallel/images_common.py | minrk/ipython-cse17 | 16a9059c7054a8bd4977a3cb8b09c100ea779069 | [
"BSD-3-Clause"
] | null | null | null | import os
import matplotlib.pyplot as plt
from skimage.io import imread
def plot_corners(img, corners, show=True):
"""Display the image and plot all contours found"""
plt.imshow(img, cmap='gray')
plt.plot(corners[:,1], corners[:,0], 'r+', markeredgewidth=1.5, markersize=8) # Plot corners
plt.axis('image')
plt.xticks([])
plt.yticks([])
if show:
plt.show()
def find_corners(path, min_distance=5):
"""Find corners in an image at path
Returns the image and the corner lists.
"""
from skimage.feature import corner_harris, corner_peaks
img = imread(path, flatten=True)
corners = corner_peaks(corner_harris(img), min_distance=min_distance)
return img, corners
def get_corners_image(path):
"""Given a path, return a PNG of the image with contour lines
Calls both find_contours and plot_contours
"""
from IPython.core.pylabtools import print_figure
img, corners = find_corners(path)
plot_corners(img, corners, show=False)
fig = plt.gcf()
pngdata = print_figure(fig)
plt.close(fig)
return pngdata
def get_pictures(pictures_dir):
"""Return a list of picture files found in pictures_dir"""
pictures = []
for directory, subdirs, files in os.walk(pictures_dir):
for fname in files:
if fname.lower().endswith(('.jpg', '.png')):
pictures.append(os.path.join(directory, fname))
return pictures
| 29.24 | 96 | 0.666211 |
f23153ff9da39e77238d222d2874c0c47b3effe7 | 1,765 | py | Python | tests/test_copies.py | mschmidtkorth/shallow-backup | 6629fed7d5a3a13eb068c7ef0168cfa8ffbd3bbf | [
"MIT"
] | 1 | 2021-07-25T19:26:47.000Z | 2021-07-25T19:26:47.000Z | tests/test_copies.py | mschmidtkorth/shallow-backup | 6629fed7d5a3a13eb068c7ef0168cfa8ffbd3bbf | [
"MIT"
] | null | null | null | tests/test_copies.py | mschmidtkorth/shallow-backup | 6629fed7d5a3a13eb068c7ef0168cfa8ffbd3bbf | [
"MIT"
] | null | null | null | import os
import sys
import pytest
import shutil
from .test_utils import setup_env_vars, unset_env_vars, BACKUP_DEST_DIR, FAKE_HOME_DIR, DIRS
sys.path.insert(0, "../shallow_backup")
from shallow_backup.utils import copy_dir_if_valid
TEST_TEXT_FILE = os.path.join(FAKE_HOME_DIR, 'test-file.txt')
| 32.090909 | 94 | 0.65779 |
f2320f768e412bebfaa5c2e31eeb4a3c480eacaf | 1,395 | py | Python | loan/killeragent.py | Casper-Smet/LOAN | 3aabf80cf4314bcba33779329fc6e4971b85e742 | [
"MIT"
] | null | null | null | loan/killeragent.py | Casper-Smet/LOAN | 3aabf80cf4314bcba33779329fc6e4971b85e742 | [
"MIT"
] | null | null | null | loan/killeragent.py | Casper-Smet/LOAN | 3aabf80cf4314bcba33779329fc6e4971b85e742 | [
"MIT"
] | null | null | null | from collections import namedtuple
import networkx as nx
from mesa import Agent, Model
| 34.875 | 128 | 0.658065 |
f23235dddab2a9fffc993f7fe1be533663c51d2b | 290 | py | Python | src/calc.py | ceIery/epic7-speed-calculator | 2f91e57117e2b6873772e6a703e47241570ab75f | [
"MIT"
] | null | null | null | src/calc.py | ceIery/epic7-speed-calculator | 2f91e57117e2b6873772e6a703e47241570ab75f | [
"MIT"
] | null | null | null | src/calc.py | ceIery/epic7-speed-calculator | 2f91e57117e2b6873772e6a703e47241570ab75f | [
"MIT"
] | null | null | null | """
Given a base speed value and a list of percentages, calculates the speed value
for each percentage
"""
| 24.166667 | 78 | 0.662069 |
f2330e7134a6c2ae1cacee5b851dbdfec9f5f1d4 | 11,762 | py | Python | src/magi/actions/base.py | personalrobotics/magipy | 6f86d6938168f580f667cfc093cf7e9f218e2853 | [
"BSD-3-Clause"
] | null | null | null | src/magi/actions/base.py | personalrobotics/magipy | 6f86d6938168f580f667cfc093cf7e9f218e2853 | [
"BSD-3-Clause"
] | 1 | 2018-01-06T00:24:06.000Z | 2018-01-06T00:24:06.000Z | src/magi/actions/base.py | personalrobotics/magipy | 6f86d6938168f580f667cfc093cf7e9f218e2853 | [
"BSD-3-Clause"
] | null | null | null | """Base classes, context managers, and exceptions for MAGI actions."""
from abc import ABCMeta, abstractmethod
import logging
from openravepy import KinBody, Robot
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
class ExecutableSolution(object):
"""Abstract base class for executing post-processed solutions."""
__metaclass__ = ABCMeta
def __init__(self, solution):
"""
@param solution: Solution that generated this ExecutableSolution
"""
self.solution = solution
self.precondition = solution.precondition
self.postcondition = solution.postcondition
def to_key(obj):
"""
Return a tuple that uniquely identifies an object in an Environment.
The output of this function can be passed to from_key to find the
equivalent object in, potentially, a different OpenRAVE environment.
@param obj: object in an OpenRAVE environment
@return tuple that uniquely identifies the object
"""
if obj is None:
return None
elif isinstance(obj, (KinBody, Robot)):
key = obj.GetName(),
elif isinstance(obj, (KinBody.Joint, KinBody.Link)):
key = obj.GetParent().GetName(), obj.GetName()
elif isinstance(obj, Robot.Manipulator):
key = obj.GetRobot().GetName(), obj.GetName()
else:
raise TypeError('Unknown type "{!s}".'.format(type(obj)))
return (type(obj), ) + key
def from_key(env, key):
"""
Return the object identified by the input key in an Environment.
The input of this function is constructed by the to_key function.
@param env: an OpenRAVE environment
@param key: tuple that uniquely identifies the object
@return object in the input OpenRAVE environment
"""
if key is None:
return None
obj_type = key[0]
if issubclass(obj_type, (KinBody, Robot)):
return env.GetKinBody(key[1])
elif issubclass(obj_type, KinBody.Joint):
return env.GetKinBody(key[1]).GetJoint(key[2])
elif issubclass(obj_type, KinBody.Link):
return env.GetKinBody(key[1]).GetLink(key[2])
elif issubclass(obj_type, Robot.Manipulator):
return env.GetRobot(key[1]).GetManipulator(key[2])
else:
raise TypeError('Unknown type "{!s}".'.format(obj_type))
| 32.672222 | 85 | 0.655841 |
f233b62fa43bf27f7df361b2d0940e083df21551 | 6,471 | py | Python | src/core/python/core/io/od.py | railtoolkit/OpenLinTim | 27eba8b6038946ce162e9f7bbc0bd23045029d51 | [
"MIT"
] | null | null | null | src/core/python/core/io/od.py | railtoolkit/OpenLinTim | 27eba8b6038946ce162e9f7bbc0bd23045029d51 | [
"MIT"
] | null | null | null | src/core/python/core/io/od.py | railtoolkit/OpenLinTim | 27eba8b6038946ce162e9f7bbc0bd23045029d51 | [
"MIT"
] | null | null | null | from typing import List
from core.exceptions.input_exceptions import (InputFormatException,
InputTypeInconsistencyException)
from core.model.graph import Graph
from core.model.impl.fullOD import FullOD
from core.model.impl.mapOD import MapOD
from core.model.infrastructure import InfrastructureNode
from core.model.od import OD, ODPair
from core.io.csv import CsvReader, CsvWriter
from core.model.ptn import Stop, Link
from core.util.config import Config, default_config
| 44.020408 | 126 | 0.626642 |
f23575bb8b4e289c914a5be32dd736b94767c391 | 4,395 | py | Python | kriging/_kriging.py | ERSSLE/ordinary_kriging | f983081e4f12b0bae03bd042a6f451c65dcb2759 | [
"MIT"
] | 3 | 2020-09-08T16:55:44.000Z | 2021-12-04T15:35:07.000Z | kriging/_kriging.py | ERSSLE/ordinary_kriging | f983081e4f12b0bae03bd042a6f451c65dcb2759 | [
"MIT"
] | null | null | null | kriging/_kriging.py | ERSSLE/ordinary_kriging | f983081e4f12b0bae03bd042a6f451c65dcb2759 | [
"MIT"
] | 2 | 2021-08-25T09:35:50.000Z | 2021-12-07T08:19:11.000Z | # encoding: utf-8
"""
Ordinary Kriging interpolation is a linear estimation of regionalized variables.
It assumes that the data change into a normal distribution,
and considers that the expected value of regionalized variable Z is unknown.
The interpolation process is similar to the weighted sliding average,
and the weight value is determined by spatial data analysis.
"""
import numpy as np
from shapely.geometry import Polygon,Point,shape
from shapely.geometry.multipolygon import MultiPolygon
from shapely.prepared import prep
def shape_shadow(xgrid,ygrid,mapdata):
"""
Mask processing.
Parameters
----------
xgrid: grid coordinates of longitude.
ygrid: grid coordinates of latitude.
mapdata: array of map data.
Return
------
np.ndarray: An array of Boolean types.
"""
newshp = Polygon()
for shap in mapdata:
newshp = newshp.union(shape({'type':'Polygon','coordinates':[shap]}))
points = []
for xi,yi in zip(xgrid.ravel(),ygrid.ravel()):
points.append(Point([xi,yi]))
prep_newshp = prep(newshp)
mask = []
for p in points:
mask.append(bool(prep_newshp.contains(p)-1))
mask = np.array(mask).reshape(xgrid.shape)
return mask
def interpolate(xy,z,extension=1.2,point_counts=(100,100)):
"""
Interpolate through the Kriging class, and return the grid points
of the longitude and latitude interpolation results
Parameters
----------
xy: The latitude and longitude coordinates of a spatial data point.
z: The latitude and longitude coordinates of a spatial data point.
extension: The interpolating region is expanded to cover a wider area.
point_counts: How many data points to interpolate, default is 100 * 100.
"""
kri = Kriging()
kri.fit(xy,z)
x_max,x_min,y_max,y_min = xy[:,0].max(),xy[:,0].min(),xy[:,1].max(),xy[:,1].min()
p = (extension - 1.0)/2
x_s = x_min - (x_max-x_min)*p
x_e = x_max + (x_max-x_min)*p
y_s = y_min - (y_max-y_min)*p
y_e = y_max + (y_max-y_min)*p
xls = np.linspace(x_s,x_e,point_counts[0])
yls = np.linspace(y_s,y_e,point_counts[1])
xgrid,ygrid = np.meshgrid(xls,yls)
xgridls,ygridls = xgrid.ravel(),ygrid.ravel()
if len(xgridls) > 100000: # Consider memory limit loop handling.
zgridls = np.array([])
for s,e in zip(np.arange(0,len(xgridls),100000)[:-1],np.arange(0,len(xgridls),100000)[1:]):
zgridls = np.concatenate([zgridls,kri.predict(np.c_[xgridls[s:e],ygridls[s:e]])])
if e < len(xgridls):
zgridls = np.concatenate([zgridls,kri.predict(np.c_[xgridls[e:],ygridls[e:]])])
else:
zgridls = kri.predict(np.c_[xgridls,ygridls])
zgrid = zgridls.reshape(xgrid.shape)
return xgrid,ygrid,zgrid
| 35.731707 | 100 | 0.597952 |
f236ea30d7814e6e5f7e36351bc7667f7fad4f04 | 125 | py | Python | steam/utils/__init__.py | ivicel/steamkit-python | 0a3f250e432cf890965db5e7245841aa512bca22 | [
"Apache-2.0"
] | 5 | 2018-11-16T08:59:41.000Z | 2021-04-03T05:32:18.000Z | steam/utils/__init__.py | ivicel/steamkit-python | 0a3f250e432cf890965db5e7245841aa512bca22 | [
"Apache-2.0"
] | null | null | null | steam/utils/__init__.py | ivicel/steamkit-python | 0a3f250e432cf890965db5e7245841aa512bca22 | [
"Apache-2.0"
] | null | null | null | from .util import clear_proto_mask, is_proto_msg, add_proto_mask
__all__ = [clear_proto_mask, is_proto_msg, add_proto_mask] | 31.25 | 64 | 0.84 |
f2377bda1f457053d6b4f86097a8d1ba0041422b | 260 | py | Python | src/arm/src/iksolverservicetester.py | Busboombot/ros_idx6dof | 63b3a49393ab2c619b6b56c634cd440ab9b464ef | [
"MIT"
] | 1 | 2020-03-15T15:30:43.000Z | 2020-03-15T15:30:43.000Z | src/arm/src/iksolverservicetester.py | Busboombot/ros_idx6dof | 63b3a49393ab2c619b6b56c634cd440ab9b464ef | [
"MIT"
] | null | null | null | src/arm/src/iksolverservicetester.py | Busboombot/ros_idx6dof | 63b3a49393ab2c619b6b56c634cd440ab9b464ef | [
"MIT"
] | null | null | null | #!/usr/bin/python
import rospy
from arm.srv import IKService, IKServiceResponse
rospy.init_node("asdf", anonymous=True)
rospy.wait_for_service('IKService')
srv = rospy.ServiceProxy('IKService', IKService)
resp = srv([5, 16, 8, 0, 0, 0], None)
print resp
| 17.333333 | 48 | 0.734615 |
f23806bdb5c4b2e6ddeae98b2f41f0141fe5c5b9 | 1,410 | py | Python | crypto-scrapers/scrapers/spiders/coin_market_cap.py | chnsh/crypto-index-fund | 6c4122b868372ba99aba4f703e85d8ee12af07de | [
"MIT"
] | 14 | 2018-05-27T19:34:59.000Z | 2022-02-09T12:02:38.000Z | crypto-scrapers/scrapers/spiders/coin_market_cap.py | chnsh/crypto-index-fund | 6c4122b868372ba99aba4f703e85d8ee12af07de | [
"MIT"
] | 4 | 2018-05-28T02:44:07.000Z | 2022-03-02T14:55:20.000Z | crypto-scrapers/scrapers/spiders/coin_market_cap.py | chnsh/crypto-index-fund | 6c4122b868372ba99aba4f703e85d8ee12af07de | [
"MIT"
] | 1 | 2022-03-07T05:26:47.000Z | 2022-03-07T05:26:47.000Z | from datetime import datetime
from locale import *
import scrapy
from injector import Injector
from scrapers.items import CoinMarketCapItem
from scrapers.utils import UrlListGenerator
setlocale(LC_NUMERIC, '')
| 33.571429 | 76 | 0.565248 |
f23af2303a08de830f84db88bf6e00cef4e25589 | 4,361 | py | Python | crawler/cli.py | NicolasLM/crawler | 15ed6441fef3b68bfadc970f597271191fe66cf8 | [
"MIT"
] | null | null | null | crawler/cli.py | NicolasLM/crawler | 15ed6441fef3b68bfadc970f597271191fe66cf8 | [
"MIT"
] | null | null | null | crawler/cli.py | NicolasLM/crawler | 15ed6441fef3b68bfadc970f597271191fe66cf8 | [
"MIT"
] | null | null | null | from collections import OrderedDict
from urllib.parse import urlparse
import click
import rethinkdb as r
import redis
import crawler.conf as conf
# cli does not need to be thread-safe
conn = r.connect(host=conf.RethinkDBConf.HOST,
db=conf.RethinkDBConf.DB)
domains = r.table('domains')
def top(kind, count, data):
top = OrderedDict(sorted(data.items(), key=lambda t: -t[1]))
i = 1
click.secho('Top {} {}'.format(count, kind), bold=True)
for value, occurences in top.items():
if not value:
continue
click.echo('{:>15} {}'.format(value, occurences))
i += 1
if i > count:
break
| 33.806202 | 79 | 0.64022 |
f23b010b735f63cc59ac899de4d7a1e041082294 | 9,667 | py | Python | run.py | keyunluo/Pytorch-DDP | ff91affdd2c4cebe1719e9a46f118405c308fd1f | [
"Apache-2.0"
] | null | null | null | run.py | keyunluo/Pytorch-DDP | ff91affdd2c4cebe1719e9a46f118405c308fd1f | [
"Apache-2.0"
] | null | null | null | run.py | keyunluo/Pytorch-DDP | ff91affdd2c4cebe1719e9a46f118405c308fd1f | [
"Apache-2.0"
] | null | null | null | # -8*- coding: utf-8 -*-
import torch
import torch.nn as nn
import torch.optim as optim
import torch.multiprocessing as mp
import torch.backends.cudnn as cudnn
from torch.utils.data import DataLoader, Dataset
from torch.nn.parallel import DistributedDataParallel
from torch.utils.data.distributed import DistributedSampler
import argparse, random, time, os
import numpy as np
def parameter_parser():
parser = argparse.ArgumentParser(description="Run Model")
parser.add_argument("--seq_len",
type=int,
default=512,
help="max sequence length")
parser.add_argument("--ip",
type=str,
default="localhost",
help="ip address")
parser.add_argument("--port",
type=str,
default=str(random.randint(20000, 30000)),
help="port num")
parser.add_argument("--cuda_devices",
type=int,
nargs='+',
default=list(range(torch.cuda.device_count())),
help="cuda devices")
parser.add_argument("--mode",
type=str,
choices=["train", "eval"],
help="train or eval")
parser.add_argument("--num_worker",
type=int,
default=8,
help="number of data loader worker")
parser.add_argument("--batch_size",
type=int,
default=32,
help="batch size")
parser.add_argument("--epoch",
type=int,
default=5,
help="num epoch")
parser.add_argument("--max_norm",
type=int,
default=30,
help="max norm value")
return parser.parse_args()
def set_manual_seed(seed):
np.random.seed(seed)
torch.manual_seed(seed)
random.seed(seed)
cudnn.benchmark = False
cudnn.deterministic = True
def dist_init(ip, rank, local_rank, world_size, port):
"""
initialize data distributed
"""
host_addr_full = 'tcp://' + ip + ':' + str(port)
torch.distributed.init_process_group("nccl", init_method=host_addr_full, rank=rank, world_size=world_size)
torch.cuda.set_device(local_rank)
assert torch.distributed.is_initialized()
def init_weights(module):
if isinstance(module, nn.Linear):
nn.init.xavier_uniform_(module.weight.data)
nn.init.constant_(module.bias.data, 0.0)
elif isinstance(module, nn.LSTM):
nn.init.xavier_uniform_(module.weight_ih_l0.data)
nn.init.orthogonal_(module.weight_hh_l0.data)
nn.init.constant_(module.bias_ih_l0.data, 0.0)
nn.init.constant_(module.bias_hh_l0.data, 0.0)
hidden_size = module.bias_hh_l0.data.shape[0] // 4
module.bias_hh_l0.data[hidden_size:(2*hidden_size)] = 1.0
if module.bidirectional:
nn.init.xavier_uniform_(module.weight_ih_l0_reverse.data)
nn.init.orthogonal_(module.weight_hh_l0_reverse.data)
nn.init.constant_(module.bias_ih_l0_reverse.data, 0.0)
nn.init.constant_(module.bias_hh_l0_reverse.data, 0.0)
module.bias_hh_l0_reverse.data[hidden_size:(
2*hidden_size)] = 1.0
if __name__ == '__main__':
args = parameter_parser()
if args.mode == "train":
trainer()
| 39.618852 | 150 | 0.602359 |
f23c95d3f1d786e4a9f7ff9ea7ec7de8d8f85605 | 373 | py | Python | newsletter/urls.py | vallka/djellifique | fb84fba6be413f9d38276d89ae84aeaff761218f | [
"MIT"
] | null | null | null | newsletter/urls.py | vallka/djellifique | fb84fba6be413f9d38276d89ae84aeaff761218f | [
"MIT"
] | null | null | null | newsletter/urls.py | vallka/djellifique | fb84fba6be413f9d38276d89ae84aeaff761218f | [
"MIT"
] | null | null | null | from django.urls import path
from .views import *
app_name = 'newsletter'
urlpatterns = [
path('pixel/', my_image, name='pixel'),
path('click/<str:uuid>/', click_redirect, name='click'),
path('notification/', notification, name='notification'),
path('sendtest/<str:slug>', sendtest, name='sendtest'),
path('stats/<str:slug>', stats, name='stats'),
]
| 26.642857 | 61 | 0.659517 |
f23e9e3046350977154c8ce79c350de302fd2dce | 197 | py | Python | 04_While/Step03/gamjapark.py | StudyForCoding/BEAKJOON | 84e1c5e463255e919ccf6b6a782978c205420dbf | [
"MIT"
] | null | null | null | 04_While/Step03/gamjapark.py | StudyForCoding/BEAKJOON | 84e1c5e463255e919ccf6b6a782978c205420dbf | [
"MIT"
] | 3 | 2020-11-04T05:38:53.000Z | 2021-03-02T02:15:19.000Z | 04_While/Step03/gamjapark.py | StudyForCoding/BEAKJOON | 84e1c5e463255e919ccf6b6a782978c205420dbf | [
"MIT"
] | null | null | null | n = int(input())
temp_n = n
k=0
while True:
a = int(temp_n / 10)
b = temp_n % 10
c = (a + b) % 10
new = b*10 + c
k += 1
if new == n:
break
temp_n = new
print(k)
| 14.071429 | 24 | 0.446701 |
f23ec17cf55792ab6ef9150b36b5c3e6f5471fbb | 6,491 | py | Python | vesc_driver/src/mathdir/cubic_spline_planner.py | Taek-16/vesc_study | c4f8e56a2530b17622ca73e9eba57830a1b51ad9 | [
"Apache-2.0"
] | 1 | 2021-02-13T10:48:13.000Z | 2021-02-13T10:48:13.000Z | vesc_driver/src/mathdir/cubic_spline_planner.py | Taek-16/vesc_study | c4f8e56a2530b17622ca73e9eba57830a1b51ad9 | [
"Apache-2.0"
] | null | null | null | vesc_driver/src/mathdir/cubic_spline_planner.py | Taek-16/vesc_study | c4f8e56a2530b17622ca73e9eba57830a1b51ad9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
cubic spline planner
Author: Atsushi Sakai
"""
import math
import numpy as np
import bisect
from scipy.spatial import distance
def calc_spline_course(x, y, ds=0.1):
sp = Spline2D(x, y)
s = list(np.arange(0, sp.s[-1], ds))
rx, ry, ryaw, rk, rdk = [], [], [], [], []
for i_s in s:
ix, iy = sp.calc_position(i_s)
rx.append(ix)
ry.append(iy)
ryaw.append(sp.calc_yaw(i_s))
rk.append(sp.calc_curvature(i_s))
rdk.append(sp.calc_d_curvature(i_s))
return rx, ry, ryaw, rk, rdk, s
def main():
print("Spline 2D test")
import matplotlib.pyplot as plt
import numpy as np
manhae1 = np.load(file='/home/menguiin/catkin_ws/src/macaron_2/path/K-CITY-garage-1m.npy')
x = manhae1[0:manhae1.shape[0]-1, 0]
y = manhae1[0:manhae1.shape[0]-1, 1]
rx, ry, ryaw, rk, rdk, s = calc_spline_course(x, y)
s = np.array(s)
flg, ax = plt.subplots(1)
plt.plot(range(-s.shape[0],s.shape[0],2),s, "s", label="s-value")
plt.grid(True)
plt.axis("equal")
plt.xlabel("index")
plt.ylabel("sval")
plt.legend()
flg, ax = plt.subplots(1)
plt.plot(x, y, "xb", label="input")
plt.plot(rx, ry, "-r", label="spline")
plt.grid(True)
plt.axis("equal")
plt.xlabel("x[m]")
plt.ylabel("y[m]")
plt.legend()
flg, ax = plt.subplots(1)
plt.plot(s, [math.degrees(iyaw) for iyaw in ryaw], "or", label="yaw")
plt.grid(True)
plt.legend()
plt.xlabel("line length[m]")
plt.ylabel("yaw angle[deg]")
flg, ax = plt.subplots(1)
plt.plot(s, rk, "-r", label="curvature")
plt.grid(True)
plt.legend()
plt.xlabel("line length[m]")
plt.ylabel("curvature [1/m]")
plt.show()
if __name__ == '__main__':
main()
| 24.130112 | 131 | 0.475582 |
f23ec9a0fbd46e6d9b5f8659349c47ab52aec354 | 333 | py | Python | bbtest/steps/appliance_steps.py | jancajthaml-openbank/e2e | a2ef84b6564022e95de76438fc795e2ef927aa2b | [
"Apache-2.0"
] | null | null | null | bbtest/steps/appliance_steps.py | jancajthaml-openbank/e2e | a2ef84b6564022e95de76438fc795e2ef927aa2b | [
"Apache-2.0"
] | 30 | 2018-03-18T05:58:32.000Z | 2022-01-19T23:21:31.000Z | bbtest/steps/appliance_steps.py | jancajthaml-openbank/e2e | a2ef84b6564022e95de76438fc795e2ef927aa2b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from behave import *
from helpers.eventually import eventually
| 23.785714 | 82 | 0.744745 |
f23fb929e898694417f38446747b98726264f0e7 | 1,211 | py | Python | irkshop/urls.py | Beomi/irkshop | c109a62216cb6550add64fbf402883debc5011d1 | [
"Apache-2.0"
] | 19 | 2016-11-06T10:28:14.000Z | 2020-11-01T02:04:51.000Z | irkshop/urls.py | Beomi/irkshop | c109a62216cb6550add64fbf402883debc5011d1 | [
"Apache-2.0"
] | 17 | 2016-10-19T11:58:48.000Z | 2022-01-13T00:32:34.000Z | irkshop/urls.py | Beomi/irkshop | c109a62216cb6550add64fbf402883debc5011d1 | [
"Apache-2.0"
] | 4 | 2016-11-06T10:54:26.000Z | 2019-08-31T16:08:56.000Z | from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth.views import login, logout
from django.conf import settings
from django.views.static import serve
from django.views.generic import TemplateView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/login/$', login,
{'template_name': 'login_page/login.html'}, name='login'),
url(r'^accounts/logout/$', logout, name='logout'),
url('', include('social_django.urls', namespace='social')), # .
url(r'^ckeditor/', include('ckeditor_uploader.urls')),
url(r'^ht/', include('health_check.urls')),
url(r'^paypal/', include('paypal.standard.ipn.urls')),
url(r'^shop/', include('goods.urls', namespace='shop')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
]
if settings.DEBUG:
import debug_toolbar
from django.conf.urls.static import static
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
url(r'^uploads/(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| 41.758621 | 101 | 0.663088 |
f240eb401196f0b66c32fe422e4a7253f5e5528f | 1,469 | py | Python | mojave_setup/fonts.py | RuchirChawdhry/macOS-Mojave-Setup | 5e61fe8c20abc42e63fcbd1c7e310aab8cc02a1c | [
"MIT"
] | null | null | null | mojave_setup/fonts.py | RuchirChawdhry/macOS-Mojave-Setup | 5e61fe8c20abc42e63fcbd1c7e310aab8cc02a1c | [
"MIT"
] | null | null | null | mojave_setup/fonts.py | RuchirChawdhry/macOS-Mojave-Setup | 5e61fe8c20abc42e63fcbd1c7e310aab8cc02a1c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import subprocess as sp
| 22.953125 | 80 | 0.479238 |
f2414f0188cf0460b22148b0732eea50d4b58390 | 5,142 | py | Python | plag/urls.py | neetu6860/plagiarism-detection-software | 7f05210aafdccf33a2bc732a40348eff43f46fba | [
"MIT"
] | 19 | 2018-09-03T09:10:20.000Z | 2021-12-24T13:52:18.000Z | plag/urls.py | neetu6860/plagiarism-detection-software | 7f05210aafdccf33a2bc732a40348eff43f46fba | [
"MIT"
] | 3 | 2019-10-31T18:42:38.000Z | 2021-06-10T21:37:23.000Z | plag/urls.py | neetu6860/plagiarism-detection-software | 7f05210aafdccf33a2bc732a40348eff43f46fba | [
"MIT"
] | 16 | 2018-06-06T15:04:59.000Z | 2022-03-29T04:53:07.000Z | from django.conf.urls import patterns, url
from django.contrib import admin
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
admin.autodiscover()
from plag import views, const
urlpatterns = patterns('',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^index-trial/$', views.IndexTrialView.as_view(), name='index_trial'),
url(r'^download/(?P<prot_res_id>\d+)$', views.download_file, name='download'),
url(r'^products/$', TemplateView.as_view(template_name='plag/static/products.html'),
name='products'),
url(r'^features-screenshots/$',
TemplateView.as_view(template_name='plag/static/features_and_screenshots.html'),
name='features'),
url(r'^url-protection/$', TemplateView.as_view(template_name='plag/static/url_protection.html'),
name='url_prot'),
url(r'^document-protection/$',
TemplateView.as_view(template_name='plag/static/doc_protection.html'), name='doc_prot'),
url(r'^pricing/$', TemplateView.as_view(template_name='plag/static/pricing.html'),
name='pricing'),
url(r'^risks-of-plagiarism/$',
TemplateView.as_view(template_name='plag/static/risks_of_plagiarism.html'),
name='risks_plag'),
url(r'^about-us/$', TemplateView.as_view(template_name='plag/static/about.html'), name='about'),
url(r'^our-customers/$', TemplateView.as_view(template_name='plag/static/our_customers.html'),
name='our_customers'),
url(r'^contact-us/$', TemplateView.as_view(template_name='plag/static/contact_us.html'),
name='contact'),
url(r'^order/$', views.OrderView.as_view(), name='order'),
url(r'^ajax/username-check/$', views.username_unique, name='ajax_username_unique'),
url(r'^account/$', views.account, name='account'),
url(r'^account/profile/$', login_required(views.ProfileView.as_view()), name='profile'),
url(r'^account/invoice/(?P<pk>\d+)$', views.invoice, name='invoice'),
url(r'^account/invoice/pay/(?P<pk>\d+)$', views.pay_invoice, name='pay_invoice'),
url(r'^account/invoice/subscribe/(?P<pk>\d+)$', views.subscribe_invoice,
name='subscribe_invoice'),
url(r'^ipn-endpoint/$', views.ipn, name='ipn'),
url(r'^account/recent-scans/$', views.recent_scans, name='recent_scans_default'),
url(r'^account/recent-scans/(?P<num_days>\d+)$', views.recent_scans,
name='recent_scans'),
url(r'^account/recent-scans/(?P<num_days>\d+)/(?P<hide_zero>hide-zero)$',
views.recent_scans, name='recent_scans_hide_zero'),
url(r'^account/scan-history/$', views.scan_history, name='scan_history'),
url(r'^account/scan-history/(?P<hide_zero>hide-zero)$', views.scan_history,
name='scan_history_hide_zero'),
url(r'^ajax/plag-results/$', views.plagiarism_results,
name='ajax_plag_results_default'),
url(r'^ajax/plag-results/(?P<scan_id>\d+)$', views.plagiarism_results,
name='plag_results'),
url(r'^ajax/sitemap/$', views.sitemap_to_urls, name='ajax_urls'),
url(r'^account/protected-resources/$',
login_required(views.ProtectedResources.as_view()), name='protected_resources'),
url(r'^sitemap/$', TemplateView.as_view(template_name='plag/static/sitemap.html'),
name='sitemap'),
url(r'^terms-of-service/$',
TemplateView.as_view(template_name='plag/static/terms_of_service.html'),
name='terms_of_service'),
url(r'^privacy-policy/$', TemplateView.as_view(template_name='plag/static/privacy_policy.html'),
name='privacy_policy'),
# TODO Remove
url(r'^data-cleanse/$', views.data_cleanse, name='data_cleanse'),
url(r'^copyright/$', TemplateView.as_view(template_name='plag/static/copyright.html'),
name='copyright'),
url(r'^login/$', 'django.contrib.auth.views.login',
{'template_name': 'plag/static/login_error.html'}),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': 'index'}, name='logout'),
)
| 57.775281 | 119 | 0.532283 |
f242795159bdd7a9675c51d4615c5d8382e738be | 2,419 | py | Python | coinut.py | DanHenry4/AutoCoinut | f7c79f19a9193bc7c1193712088ca836b030f523 | [
"MIT"
] | 4 | 2016-05-08T02:15:50.000Z | 2020-07-01T08:16:45.000Z | coinut.py | DanHenry4/AutoCoinut | f7c79f19a9193bc7c1193712088ca836b030f523 | [
"MIT"
] | null | null | null | coinut.py | DanHenry4/AutoCoinut | f7c79f19a9193bc7c1193712088ca836b030f523 | [
"MIT"
] | null | null | null | import hmac
import hashlib
import json
import uuid
import httplib2
COINUT_URL = 'https://coinut.com/api/'
| 27.488636 | 76 | 0.574618 |
f2430c615c25842a6a15c7289e5e98e1e77f49ce | 1,817 | py | Python | src/neighborly/core/residence.py | ShiJbey/neighborly | 5af1e3211f1ef0e25803790850e7cd3d3a49be69 | [
"MIT"
] | null | null | null | src/neighborly/core/residence.py | ShiJbey/neighborly | 5af1e3211f1ef0e25803790850e7cd3d3a49be69 | [
"MIT"
] | null | null | null | src/neighborly/core/residence.py | ShiJbey/neighborly | 5af1e3211f1ef0e25803790850e7cd3d3a49be69 | [
"MIT"
] | null | null | null | from typing import Any, Dict
from ordered_set import OrderedSet
from neighborly.core.ecs import Component
from neighborly.core.engine import AbstractFactory, ComponentDefinition
| 32.446429 | 85 | 0.63071 |
f2439cb603c2e5bb9b0700a3b097f6415267d55a | 15,518 | py | Python | tests/SBHRun_Environment.py | SD2E/synbiohub_adapter | 492f9ef1054b17d790654310b895bb7ad155808e | [
"MIT"
] | 1 | 2019-10-08T20:31:16.000Z | 2019-10-08T20:31:16.000Z | tests/SBHRun_Environment.py | SD2E/synbiohub_adapter | 492f9ef1054b17d790654310b895bb7ad155808e | [
"MIT"
] | 84 | 2018-03-06T16:02:30.000Z | 2020-09-01T18:17:54.000Z | tests/SBHRun_Environment.py | SD2E/synbiohub_adapter | 492f9ef1054b17d790654310b895bb7ad155808e | [
"MIT"
] | 1 | 2019-02-06T17:17:54.000Z | 2019-02-06T17:17:54.000Z |
import threading
import time
import pandas as pd
import numpy as np
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import os
import fnmatch
import random
import re
import getpass
import sys
from rdflib import Graph
from synbiohub_adapter.SynBioHubUtil import *
from sbol import *
"""
This class will perform unit testing to query information from SynBioHub's instances.
Installation Requirement(s):
- This test environment requires two third party packages to display plot:
1. pip install pandas
2. python -mpip install -U matplotlib
To run this python file, enter in the following command from the synbiohub_adapter directory:
python -m tests.SBHRun_Environment
author(s) :Tramy Nguyen
"""
def get_uniqueID(idPrefix):
    """Return a unique identifier built from idPrefix and the current time.

    Colons and spaces in the timestamp are replaced with underscores so the
    result is safe to use as an id / file-name fragment.
    """
    timestamp = time.ctime()
    raw_id = idPrefix + '_' + timestamp
    return raw_id.replace(':', '_').replace(' ', '_')
def create_sbolDocs(numDocs, idPrefix, sbolFile):
    """Create a list of SBOL Documents from a single SBOL file.

    numDocs: how many SBOL documents this method should create
    idPrefix: prefix used to build a unique id for each document
    sbolFile: path of the SBOL file each document is created from

    Returns a tuple (sbolDoc_List, sbolTriples): the pySBOL Document objects
    and the matching SBOLTriple wrappers they were created from.
    """
    sbolDoc_List = []
    sbolTriples = []
    # (removed unused local `u_counter` from the original implementation)
    for i in range(numDocs):
        # get_uniqueID appends a timestamp, so ids stay unique across runs
        uid = get_uniqueID(idPrefix + "_d" + str(i))
        trip_obj = SBOLTriple(sbolFile, uid)
        sbolTriples.append(trip_obj)
        sbolDoc_List.append(trip_obj.sbolDoc())
        print("created doc%s" % i)
    return sbolDoc_List, sbolTriples
def get_randomFile(sbolFiles):
    """Return one randomly selected file path from the given list.

    sbolFiles: a non-empty list of SBOL file paths to choose from

    Note: the original docstring described a directory parameter
    (`dirLocation`); the function actually takes an already-built list.
    """
    return random.choice(sbolFiles)
def get_sbolList(dirLocation):
    """Return absolute paths of the files directly inside dirLocation.

    Only the top level of the directory is scanned (the original returned
    during the first os.walk iteration, so subdirectories were never
    descended into; that behavior is preserved).  Returns an empty list when
    the directory does not exist or yields nothing from os.walk.

    dirLocation: path of the directory to scan
    """
    for root, _dirs, files in os.walk(dirLocation):
        # os.walk yields the top-level directory first; returning here keeps
        # the scan intentionally shallow.
        return [os.path.abspath(os.path.join(root, fileName)) for fileName in files]
    return []  # dirLocation missing or not a directory
def push_sbh(sbolDoc, sbh_connector):
    """Push a document to a new SynBioHub Collection; return elapsed seconds.

    sbolDoc: the pySBOL Document to submit
    sbh_connector: An instance of pySBOL's PartShop needed to perform login
        for pushing and pulling data to and from SynBioHub

    Exits the process when SynBioHub does not report a successful upload.
    """
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # recommended replacement for timing short intervals.
    start = time.perf_counter()
    result = sbh_connector.submit(sbolDoc)
    end = time.perf_counter()
    print(result)
    if result != 'Successfully uploaded':
        # Abort the whole benchmark run on a failed push.
        sys.exit()
    return end - start
def pull_sbh(sbh_connector, sbolURI):
    """Pull from an existing SynBioHub Collection; return elapsed seconds.

    sbh_connector: An instance of pySBOL's PartShop needed to perform login
        for pushing and pulling data to and from SynBioHub
    sbolURI: The URI of the SynBioHub collection or the specific part to be
        fetched
    """
    sbolDoc = Document()
    setHomespace("https://bbn.com")
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # recommended monotonic replacement for timing short intervals.
    start = time.perf_counter()
    sbh_connector.pull(sbolURI, sbolDoc)
    end = time.perf_counter()
    if sbolDoc is None:
        # Defensive guard kept from the original; sbolDoc is constructed
        # above, so this branch is not expected to trigger.
        print("Found nothing and caused no error.")
    else:
        experimentalData_tl = []
        for tl in sbolDoc:
            # Original code referenced an undefined name `topLevel` here,
            # which raised a NameError at runtime; the loop variable is `tl`.
            if tl.type == 'http://sd2e.org#ExperimentalData':
                experimentalData_tl.append(tl)
        # 74 is the expected ExperimentalData count — presumably specific to
        # the benchmark collection used by this test; TODO confirm.
        if len(experimentalData_tl) != 74:
            print("Found the wrong SynBioHub Part with this uri: %s" % sbolURI)
    return end - start
if __name__ == '__main__':
    # Interactive entry point: log into a SynBioHub instance and run one
    # benchmark over the listed SBOL file(s).
    server_name = "https://synbiohub.bbn.com"
    print("Logging into: " + server_name)
    sbh_connector = PartShop(server_name)
    # Credentials are read interactively; the password prompt is sent to
    # stderr so it is not captured when stdout is redirected.
    sbh_user = input('Enter Username: ')
    sbh_connector.login(sbh_user, getpass.getpass(prompt='Enter SynBioHub Password: ', stream=sys.stderr))
    # Config.setOption("verbose", True)
    # sbolFiles = get_sbolList("./examples/workingFiles")
    sbolFiles = ["./examples/c_trips40000.xml"]
    iterations = 1
    sbolDoc_size = 1
    # NOTE(review): br_speed is not defined in this chunk — presumably it is
    # defined elsewhere in this module; confirm it is in scope before running.
    br_speed(sbh_connector, sbolDoc_size, sbolFiles)
    # br_triples(sbh_connector, iterations, sbolFiles)
    # iterations, set_size=10, t_growthRate=5, sbolDoc_size=100
    # TODO: MAKE SURE TO CHANGE COLOR OF BAR GRAPH TO MAKE IT LOOK COOL...
    # br_setThread(sbh_connector, 3, 5, 3, 50, sbolFiles)
| 33.661605 | 110 | 0.660974 |
f245528c941762eda827c561627c5aa634c97c9f | 2,842 | py | Python | setup.py | Unidata/drilsdown | 55aca7168fb390f31c36729605401564e9b82c56 | [
"MIT"
] | 3 | 2018-05-25T00:19:12.000Z | 2021-01-08T15:54:36.000Z | setup.py | suvarchal/drilsdown | e82f58396f640fef847353caf1bd4b2bf016c7a6 | [
"MIT"
] | 11 | 2017-10-31T20:15:24.000Z | 2019-12-16T21:01:55.000Z | setup.py | suvarchal/drilsdown | e82f58396f640fef847353caf1bd4b2bf016c7a6 | [
"MIT"
] | 10 | 2018-02-08T22:23:28.000Z | 2019-09-29T23:25:19.000Z | import os
from six import iteritems
from setuptools import setup
from setuptools.command.develop import develop
from setuptools.command.install import install
import subprocess
PACKAGE_NAME = 'drilsdown'
SOURCES = {
'ipython_IDV': 'projects/ipython_IDV',
'idv_teleport': 'projects/IDV_teleport',
'ramadda_publish': 'projects/RAMADDA_publish',
}
VERSION = '2.4.91'
def install_drilsdown_projects(sources, develop=False):
    """Install every drilsdown sub-project with pip.

    sources: mapping of project name -> path relative to the current
        working directory
    develop: when True, install each project in editable (-e) mode
    """
    mode_label = "development" if develop else "normal"
    print("installing all drilsdown projects in {} mode".format(mode_label))
    base_dir = os.getcwd()
    for name, rel_path in iteritems(sources):
        try:
            os.chdir(os.path.join(base_dir, rel_path))
            # could be pip3 on certain platforms
            cmd = ['pip', 'install', '-e', '.'] if develop else ['pip', 'install', '.']
            subprocess.check_call(cmd)
        except Exception as err:
            # Best-effort: report the failure and keep installing the rest.
            print("Oops, something went wrong installing", name)
            print(err)
        finally:
            # Always return to the original directory for the next project.
            os.chdir(base_dir)
# Package metadata and install configuration for the drilsdown meta-package.
setup(
    name=PACKAGE_NAME,
    version=VERSION,
    author="Drilsdown team",
    author_email="drilsdown@unidata.ucar.edu",
    description="A collection of tools for jupyter notebooks",
    long_description_content_type='text/markdown',
    long_description=open('README.md').read(),
    url="https://github.com/Unidata/drilsdown",
    license="MIT",
    classifiers=[
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    install_requires=[
        'future',
        'six',
        'requests',
        'ipython',
        'ipywidgets>=7.1.0rc',
        'jupyter-client',
        # 'ipython_IDV>=' + VERSION + "'", # cannot be source and a dependency??
        'ipython-IDV', # from pypi
        'ramadda_publish', #from pypi
        'idv_teleport', #from pypi
    ],
    cmdclass={
        #'install': InstallCmd, # do not overwrite for now to make
        # pip install and python setup.py install do same.
        # note in class pip might be called pip3 on certain platforms
        # NOTE(review): DevelopCmd is not defined in this chunk — presumably
        # declared earlier in this file; confirm it is in scope.
        'develop': DevelopCmd,
    },
    extras_require={
        'addons': ['numpy','netcdf4','xarray','metpy'],
        'visual': ['pyviz','geoviews'],
    }
)
| 32.295455 | 105 | 0.60943 |
f24567e433386b2908e8d4a58f10fb0b2a6b3b98 | 2,129 | py | Python | ejercicios/Ejercicio6.py | Xavitheforce/Ejercicios_Iteracion | e840439e1277b5946592128d5c771d895c2fac2c | [
"Apache-2.0"
] | null | null | null | ejercicios/Ejercicio6.py | Xavitheforce/Ejercicios_Iteracion | e840439e1277b5946592128d5c771d895c2fac2c | [
"Apache-2.0"
] | null | null | null | ejercicios/Ejercicio6.py | Xavitheforce/Ejercicios_Iteracion | e840439e1277b5946592128d5c771d895c2fac2c | [
"Apache-2.0"
] | null | null | null | from datetime import datetime | 39.425926 | 184 | 0.622828 |
f2470b57f1baf4a7e69d418b396753a2d81c5b04 | 752 | py | Python | authentik/sources/saml/migrations/0011_auto_20210324_0736.py | BeryJu/passbook | 350f0d836580f4411524614f361a76c4f27b8a2d | [
"MIT"
] | 15 | 2020-01-05T09:09:57.000Z | 2020-11-28T05:27:39.000Z | authentik/sources/saml/migrations/0011_auto_20210324_0736.py | BeryJu/passbook | 350f0d836580f4411524614f361a76c4f27b8a2d | [
"MIT"
] | 302 | 2020-01-21T08:03:59.000Z | 2020-12-04T05:04:57.000Z | authentik/sources/saml/migrations/0011_auto_20210324_0736.py | BeryJu/passbook | 350f0d836580f4411524614f361a76c4f27b8a2d | [
"MIT"
] | 3 | 2020-03-04T08:21:59.000Z | 2020-08-01T20:37:18.000Z | # Generated by Django 3.1.7 on 2021-03-24 07:36
import django.db.models.deletion
from django.db import migrations, models
| 28.923077 | 78 | 0.62633 |
f248957a375715c7681a4295ca66a47a10ee7ea3 | 6,891 | py | Python | tempest/tests/common/test_service_clients.py | xavpaice/tempest | 958bd694df27511e0346d799876fe49331b8145c | [
"Apache-2.0"
] | null | null | null | tempest/tests/common/test_service_clients.py | xavpaice/tempest | 958bd694df27511e0346d799876fe49331b8145c | [
"Apache-2.0"
] | null | null | null | tempest/tests/common/test_service_clients.py | xavpaice/tempest | 958bd694df27511e0346d799876fe49331b8145c | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import random
import six
from tempest.services.baremetal.v1.json import baremetal_client
from tempest.services.data_processing.v1_1 import data_processing_client
from tempest.services.database.json import flavors_client as db_flavor_client
from tempest.services.database.json import versions_client as db_version_client
from tempest.services.identity.v2.json import identity_client as \
identity_v2_identity_client
from tempest.services.identity.v3.json import credentials_client
from tempest.services.identity.v3.json import endpoints_client
from tempest.services.identity.v3.json import identity_client as \
identity_v3_identity_client
from tempest.services.identity.v3.json import policies_client
from tempest.services.identity.v3.json import regions_client
from tempest.services.identity.v3.json import services_client
from tempest.services.image.v1.json import images_client
from tempest.services.image.v2.json import images_client as images_v2_client
from tempest.services.messaging.json import messaging_client
from tempest.services.network.json import network_client
from tempest.services.object_storage import account_client
from tempest.services.object_storage import container_client
from tempest.services.object_storage import object_client
from tempest.services.orchestration.json import orchestration_client
from tempest.services.telemetry.json import alarming_client
from tempest.services.telemetry.json import telemetry_client
from tempest.services.volume.v1.json.admin import hosts_client \
as volume_hosts_client
from tempest.services.volume.v1.json.admin import quotas_client \
as volume_quotas_client
from tempest.services.volume.v1.json.admin import services_client \
as volume_services_client
from tempest.services.volume.v1.json.admin import types_client \
as volume_types_client
from tempest.services.volume.v1.json import availability_zone_client \
as volume_az_client
from tempest.services.volume.v1.json import backups_client
from tempest.services.volume.v1.json import extensions_client \
as volume_extensions_client
from tempest.services.volume.v1.json import qos_client
from tempest.services.volume.v1.json import snapshots_client
from tempest.services.volume.v1.json import volumes_client
from tempest.services.volume.v2.json.admin import hosts_client \
as volume_v2_hosts_client
from tempest.services.volume.v2.json.admin import quotas_client \
as volume_v2_quotas_client
from tempest.services.volume.v2.json.admin import services_client \
as volume_v2_services_client
from tempest.services.volume.v2.json.admin import types_client \
as volume_v2_types_client
from tempest.services.volume.v2.json import availability_zone_client \
as volume_v2_az_client
from tempest.services.volume.v2.json import backups_client \
as volume_v2_backups_client
from tempest.services.volume.v2.json import extensions_client \
as volume_v2_extensions_client
from tempest.services.volume.v2.json import qos_client as volume_v2_qos_client
from tempest.services.volume.v2.json import snapshots_client \
as volume_v2_snapshots_client
from tempest.services.volume.v2.json import volumes_client as \
volume_v2_volumes_client
from tempest.tests import base
| 47.524138 | 79 | 0.753882 |
f2490fc27568d943c3ececc3e75fce355b5da3ff | 3,497 | py | Python | advent/days/day17/day.py | RuedigerLudwig/advent2021 | ce069d485bb34b4752ec4e89f195f7cc8cf084cc | [
"Unlicense"
] | null | null | null | advent/days/day17/day.py | RuedigerLudwig/advent2021 | ce069d485bb34b4752ec4e89f195f7cc8cf084cc | [
"Unlicense"
] | null | null | null | advent/days/day17/day.py | RuedigerLudwig/advent2021 | ce069d485bb34b4752ec4e89f195f7cc8cf084cc | [
"Unlicense"
] | null | null | null | from __future__ import annotations
from itertools import product
from typing import Iterator
day_num = 17
Range = tuple[int, int]
XStepRange = tuple[int, int | None]
YStepRange = tuple[int, int]
Pos = tuple[int, int]
| 31.223214 | 82 | 0.501859 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.