hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e05d022e20ec708234ba466419ce63a57d30ac77
| 2,716
|
py
|
Python
|
PythonScripting/NumbersInPython.py
|
Neo-sunny/pythonProgs
|
a9d2359d8a09d005d0ba6f94d7d256bf91499793
|
[
"MIT"
] | null | null | null |
PythonScripting/NumbersInPython.py
|
Neo-sunny/pythonProgs
|
a9d2359d8a09d005d0ba6f94d7d256bf91499793
|
[
"MIT"
] | null | null | null |
PythonScripting/NumbersInPython.py
|
Neo-sunny/pythonProgs
|
a9d2359d8a09d005d0ba6f94d7d256bf91499793
|
[
"MIT"
] | null | null | null |
"""
Demonstration of numbers in Python
"""
# Python has an integer type called int
print("int")
print("---")
print(0)
print(1)
print(-3)
print(70383028364830)
print("")
# Python has a real number type called float
print("float")
print("-----")
print(0.0)
print(7.35)
print(-43.2)
print("")
# Limited precision
print("Precision")
print("---------")
print(4.56372883832331773)
print(1.23456789012345678)
print("")
# Scientific/exponential notation
print("Scientific notation")
print("-------------------")
print(5e32)
print(999999999999999999999999999999999999999.9)
print("")
# Infinity
print("Infinity")
print("--------")
print(1e500)
print(-1e500)
print("")
# Conversions
print("Conversions between numeric types")
print("---------------------------------")
print(float(3))
print(float(99999999999999999999999999999999999999))
print(int(3.0))
print(int(3.7))
print(int(-3.7))
"""
Demonstration of simple arithmetic expressions in Python
"""
# Unary + and -
print("Unary operators")
print(+3)
print(-5)
print(+7.86)
print(-3348.63)
print("")
# Simple arithmetic
print("Addition and Subtraction")
print(1 + 2)
print(48 - 89)
print(3.45 + 2.7)
print(87.3384 - 12.35)
print(3 + 6.7)
print(9.8 - 4)
print("")
print("Multiplication")
print(3 * 2)
print(7.8 * 27.54)
print(7 * 8.2)
print("")
print("Division")
print(8 / 2)
print(3 / 2)
print(7.538 / 14.3)
print(8 // 2)
print(3 // 2)
print(7.538 // 14.3)
print("")
print("Exponentiation")
print(3 ** 2)
print(5 ** 4)
print(32.6 ** 7)
print(9 ** 0.5)
"""
Demonstration of compound arithmetic expressions in Python
"""
# Expressions can include multiple operations
print("Compound expressions")
print(3 + 5 + 7 + 27)
#Operator with same precedence are evaluated from left to right
print(18 - 6 + 4)
print("")
# Operator precedence defines how expressions are evaluated
print("Operator precedence")
print(7 + 3 * 5)
print(5.5 * 6 // 2 + 8)
print(-3 ** 2)
print("")
# Use parentheses to change evaluation order
print("Grouping with parentheses")
print((7 + 3) * 5)
print(5.5 * ((6 // 2) + 8))
print((-3) ** 2)
"""
Demonstration of the use of variables and how to assign values to
them.
"""
# The = operator can be used to assign values to variables
bakers_dozen = 12 + 1
temperature = 93
# Variables can be used as values and in expressions
print(temperature, bakers_dozen)
print("celsius:", (temperature - 32) * 5 / 9)
print("fahrenheit:", float(temperature))
# You can assign a different value to an existing variable
temperature = 26
print("new value:", temperature)
# Multiple variables can be used in arbitrary expressions
offset = 32
multiplier = 5.0 / 9.0
celsius = (temperature - offset) * multiplier
print("celsius value:", celsius)
| 17.522581
| 65
| 0.674521
| 387
| 2,716
| 4.728682
| 0.30491
| 0.036066
| 0.022951
| 0.032787
| 0.065574
| 0.05847
| 0.05847
| 0.05847
| 0.05847
| 0.05847
| 0
| 0.122475
| 0.143225
| 2,716
| 154
| 66
| 17.636364
| 0.663945
| 0.238218
| 0
| 0.120879
| 0
| 0
| 0.183106
| 0.017984
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.934066
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
e05e6c4440c357c867a4c38e37f726c4d615e768
| 1,676
|
py
|
Python
|
3DBeam/source/solving_strategies/strategies/linear_solver.py
|
JoZimmer/Beam-Models
|
e701c0bae6e3035e7a07cc590da4a132b133dcff
|
[
"BSD-3-Clause"
] | null | null | null |
3DBeam/source/solving_strategies/strategies/linear_solver.py
|
JoZimmer/Beam-Models
|
e701c0bae6e3035e7a07cc590da4a132b133dcff
|
[
"BSD-3-Clause"
] | null | null | null |
3DBeam/source/solving_strategies/strategies/linear_solver.py
|
JoZimmer/Beam-Models
|
e701c0bae6e3035e7a07cc590da4a132b133dcff
|
[
"BSD-3-Clause"
] | 1
|
2022-01-05T17:32:32.000Z
|
2022-01-05T17:32:32.000Z
|
from source.solving_strategies.strategies.solver import Solver
class LinearSolver(Solver):
    """Solver that marches a linear model through every step of the
    time array, delegating the actual integration to ``self.scheme``."""

    def __init__(self,
                 array_time, time_integration_scheme, dt,
                 comp_model,
                 initial_conditions,
                 force,
                 structure_model):
        # No linear-specific state: simply forward everything to the base Solver.
        super().__init__(array_time, time_integration_scheme, dt,
                         comp_model, initial_conditions, force, structure_model)

    def _print_solver_info(self):
        print("Linear Solver")

    def solve(self):
        """Run the time loop and record displacement/velocity/acceleration."""
        n_steps = len(self.array_time)
        for step_index in range(n_steps):
            self.step = step_index
            current_time = self.array_time[step_index]
            # print("time: {0:.2f}".format(current_time))
            self.scheme.solve_single_step(self.force[:, step_index])

            # Store the state computed by the integration scheme for this step.
            self.displacement[:, step_index] = self.scheme.get_displacement()
            self.velocity[:, step_index] = self.scheme.get_velocity()
            self.acceleration[:, step_index] = self.scheme.get_acceleration()

            # TODO: only calculate reaction when the user wants it.
            # Reaction computation was moved to the dynamic-analysis level;
            # NOTE(review): per the original author it does not consider the
            # support reaction check.
            # if self.structure_model is not None:
            #     self.dynamic_reaction[:, step_index] = self._compute_reaction()

            # Advance the scheme's internal state to the next step.
            self.scheme.update()
| 38.976744
| 80
| 0.590095
| 183
| 1,676
| 5.180328
| 0.404372
| 0.052743
| 0.041139
| 0.044304
| 0.390295
| 0.390295
| 0.390295
| 0.390295
| 0.305907
| 0.305907
| 0
| 0.00265
| 0.324582
| 1,676
| 42
| 81
| 39.904762
| 0.834806
| 0.318616
| 0
| 0
| 0
| 0
| 0.011525
| 0
| 0
| 0
| 0
| 0.02381
| 0
| 1
| 0.142857
| false
| 0
| 0.047619
| 0
| 0.238095
| 0.095238
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e068d2bbe0be95225acd32e5324a05a51bc85276
| 5,641
|
py
|
Python
|
pandas 9 - Statistics Information on data sets.py
|
PythonProgramming/Pandas-Basics-with-2.7
|
a6ecd5ac7c25dba83e934549903f229de89290d3
|
[
"MIT"
] | 10
|
2015-07-16T05:46:10.000Z
|
2020-10-28T10:35:50.000Z
|
pandas 9 - Statistics Information on data sets.py
|
PythonProgramming/Pandas-Basics-with-2.7
|
a6ecd5ac7c25dba83e934549903f229de89290d3
|
[
"MIT"
] | null | null | null |
pandas 9 - Statistics Information on data sets.py
|
PythonProgramming/Pandas-Basics-with-2.7
|
a6ecd5ac7c25dba83e934549903f229de89290d3
|
[
"MIT"
] | 9
|
2017-01-31T18:57:25.000Z
|
2019-09-10T08:52:57.000Z
|
import pandas as pd
from pandas import DataFrame
df = pd.read_csv('sp500_ohlc.csv', index_col = 'Date', parse_dates=True)
# Daily High-minus-Low trading range.
df['H-L'] = df.High - df.Low

# describe(): count, mean, std, min, max, and percentiles per column.
print( df.describe())
x = input('enter to cont')

# Pairwise correlation matrix -- e.g. whether H-L range correlates with Volume.
print( df.corr())
x = input('enter to cont')

# Covariance vs correlation:
# covariance measures how two variables change together;
# correlation measures how they move in relation to each other,
# i.e. covariance is closer to a raw strength-of-relationship measure.
print( df.cov())
x = input('enter to cont')

print( df[['Volume','H-L']].corr())
x = input('enter to cont')

# Classic (especially forex) analysis: compare correlations between instruments.
import datetime
# NOTE(review): pandas.io.data was removed from pandas long ago; this script
# needs the pandas-datareader package (or another source) to run on modern pandas.
import pandas.io.data

# All downloads share one date window.
_START = datetime.datetime(2011, 10, 1)
_END = datetime.datetime(2014, 1, 1)


def _fetch(ticker):
    """Download daily OHLCV data for `ticker` from Yahoo Finance."""
    return pd.io.data.get_data_yahoo(ticker, start=_START, end=_END)


def _adj_close_comparison(base_frame, base_name, others):
    """Reduce `base_frame` to its 'Adj Close' column (renamed to `base_name`)
    and append the 'Adj Close' series of each (name, frame) pair in `others`.

    Mutates `base_frame` in place and returns it (same behaviour as the
    original inline del/rename/assign sequence).
    """
    for column in ('Open', 'High', 'Low', 'Close', 'Volume'):
        del base_frame[column]
    base_frame.rename(columns={'Adj Close': base_name}, inplace=True)
    for name, frame in others:
        base_frame[name] = frame['Adj Close']
    return base_frame


C = _fetch('C')
AAPL = _fetch('AAPL')
MSFT = _fetch('MSFT')
TSLA = _fetch('TSLA')
print( C.head())
x = input('enter to cont')

corComp = _adj_close_comparison(C, 'C',
                                [('AAPL', AAPL), ('MSFT', MSFT), ('TSLA', TSLA)])
print( corComp.head())
x = input('enter to cont')
print( corComp.corr())
x = input('enter to cont')

# Same comparison over a wider basket of tickers.
C = _fetch('C')
AAPL = _fetch('AAPL')
MSFT = _fetch('MSFT')
TSLA = _fetch('TSLA')
BAC = _fetch('BAC')
BBRY = _fetch('BBRY')
CMG = _fetch('CMG')
EBAY = _fetch('EBAY')
JPM = _fetch('JPM')
SBUX = _fetch('SBUX')
TGT = _fetch('TGT')
WFC = _fetch('WFC')
x = input('enter to cont')
print( C.head())

corComp = _adj_close_comparison(C, 'C', [
    ('BAC', BAC), ('MSFT', MSFT), ('TSLA', TSLA), ('AAPL', AAPL),
    ('BBRY', BBRY), ('CMG', CMG), ('EBAY', EBAY), ('JPM', JPM),
    ('SBUX', SBUX), ('TGT', TGT), ('WFC', WFC),
])
print( corComp.head())
x = input('enter to cont')
print( corComp.corr())
x = input('enter to cont')

# Persist the correlation table for later use.
fancy = corComp.corr()
fancy.to_csv('bigmoney.csv')
| 32.606936
| 96
| 0.565148
| 762
| 5,641
| 4.135171
| 0.217848
| 0.162488
| 0.040622
| 0.055855
| 0.619486
| 0.603301
| 0.506506
| 0.488099
| 0.488099
| 0.488099
| 0
| 0.053617
| 0.29906
| 5,641
| 172
| 97
| 32.796512
| 0.743298
| 0.204928
| 0
| 0.730769
| 0
| 0
| 0.104508
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0.096154
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e07a13e1121d2676a50044d556f0800f60bfd2f7
| 2,849
|
py
|
Python
|
team_fundraising/text.py
|
namtel-hp/fundraising-website
|
30cb0cd2bd4505454295d11715e70712525234a3
|
[
"MIT"
] | 5
|
2019-10-26T12:41:31.000Z
|
2022-03-13T08:30:29.000Z
|
team_fundraising/text.py
|
Maalik1/fundraising-website
|
a5fcd7e8a5966f299f57c22af8c739a3d6cd501a
|
[
"MIT"
] | 9
|
2021-03-18T21:27:36.000Z
|
2022-03-11T23:42:46.000Z
|
team_fundraising/text.py
|
Maalik1/fundraising-website
|
a5fcd7e8a5966f299f57c22af8c739a3d6cd501a
|
[
"MIT"
] | 2
|
2021-01-11T14:19:01.000Z
|
2022-02-18T19:18:38.000Z
|
class Donation_text:
    """Canned user-facing copy for the donation flow: on-page messages plus
    the confirmation email (to the donor) and notification email (to the
    fundraiser)."""

    # Shown as a message across the top of the page on return from a donation
    # used in views.py:new_donation()
    thank_you = (
        "Thank you for your donation. "
        "You may need to refresh this page to see the donation."
    )

    # Subject of the donor's confirmation email
    confirmation_email_subject = (
        'Thank you for donating to the Triple Crown for Heart! '
    )

    # Start of the email sent confirming the paypal payment has gone through
    # used in paypal.py:process_paypal()
    # (the donation amount is appended after this opening)
    confirmation_email_opening = (
        'Thank you for your donation of '
    )

    # Closing of the email sent confirming the paypal payment has gone through
    # used in paypal.py:process_paypal()
    # NOTE(review): mentions the 2019 tax year -- presumably needs updating
    # per campaign; confirm with the campaign owner.
    confirmation_email_closing = (
        '.\n\nFor all donations over $20, you will receive a tax receipt for '
        'the 2019 tax year.'
        '\nYour PayPal receipt should arrive in a separate email.\n'
    )

    # Subject of the email notifying the fundraiser of a new donation
    notification_email_subject = (
        "You got a donation!"
    )

    # Opening of the fundraiser notification (donation amount is appended)
    notification_email_opening = (
        "Great news! You've just received a donation of "
    )

    # Closing of the fundraiser notification email
    notification_email_closing = (
        "\n\nAwesome work! They would probably appreciate "
        "a quick thank you email.\n\n"
        "-- Triple Crown for Heart\n"
    )
class Fundraiser_text:
    """Canned user-facing copy for the fundraiser signup flow: signup email
    parts, the post-signup page message, and login error text."""

    # Subject of the email sent on signup
    signup_email_subject = (
        "Welcome to fundraising for the Triple Crown for Heart!"
    )

    # Start of the email sent when someone signs up
    # used in views.py:signup()
    # (the fundraiser's page URL is appended after this opening)
    signup_email_opening = (
        "Thanks for signing up to fundraise with us!\n"
        "Your fundraising page can be found at:\n"
    )

    # Closing of the email sent when someone signs up
    # used in views.py:signup()
    signup_email_closing = (
        '\n\nYou can change your information by using the "Login" link at the '
        'top of that page.'
        '\n\nThe easiest way to start fundraising is to post the above link '
        'on social media or write a short email to your friends telling them '
        'about your ride.'
        '\nDon\'t forget to include the link to your page!\n'
    )

    # Message shown at the top of the fundraiser page after signing up
    # used in views.py:signup()
    signup_return_message = (
        "Thank you for signing up. Sharing your fundraiser page on social "
        "media or over email is the best way to get donations."
    )

    # Shown when the chosen username exists but the password doesn't match
    # (may be a returning fundraiser from a previous campaign)
    signup_wrong_password_existing_user = (
        "The username already exists, but the password entered is incorrect. "
        "If you were already a fundraiser for a previous campaign, please "
        "enter your previous password or use "
        "<a href='/team_fundraising/accounts/password_reset/'>"
        "Forgot your password</a>. If this is your first campaign, "
        "please choose a different username."
    )
| 33.916667
| 79
| 0.65251
| 397
| 2,849
| 4.602015
| 0.38539
| 0.019157
| 0.027367
| 0.038314
| 0.252326
| 0.219486
| 0.219486
| 0.204707
| 0.204707
| 0.204707
| 0
| 0.002921
| 0.279045
| 2,849
| 83
| 80
| 34.325301
| 0.886563
| 0.206739
| 0
| 0
| 0
| 0
| 0.577728
| 0.022272
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.090909
| 0
| 0
| 0.254545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0eb2577f85f04e68e802521ef8915750223e0174
| 624
|
py
|
Python
|
tests/wagtail_live/test_apps.py
|
wagtail/wagtail-live
|
dd769be089d457cf36db2506520028bc5f506ac3
|
[
"BSD-3-Clause"
] | 22
|
2021-06-07T20:36:18.000Z
|
2022-03-29T01:48:58.000Z
|
tests/wagtail_live/test_apps.py
|
wagtail/wagtail-live
|
dd769be089d457cf36db2506520028bc5f506ac3
|
[
"BSD-3-Clause"
] | 73
|
2021-05-21T16:08:44.000Z
|
2022-03-20T23:59:59.000Z
|
tests/wagtail_live/test_apps.py
|
wagtail/wagtail-live
|
dd769be089d457cf36db2506520028bc5f506ac3
|
[
"BSD-3-Clause"
] | 11
|
2021-06-10T10:05:13.000Z
|
2022-02-12T13:31:34.000Z
|
from django.apps import apps
from django.test import override_settings
from wagtail_live.signals import live_page_update
def test_live_page_update_signal_receivers():
    """By default, nothing is connected to the live_page_update signal."""
    assert not live_page_update.receivers
@override_settings(
    WAGTAIL_LIVE_PUBLISHER="tests.testapp.publishers.DummyWebsocketPublisher"
)
def test_live_page_update_signal_receivers_websocket():
    """With a websocket publisher configured, app ready() connects a receiver."""
    app_config = apps.get_app_config("wagtail_live")
    app_config.ready()
    # Receiver should be connected, no IndexError.
    # Bug fix: the original wrapped this lookup in try/finally and called
    # disconnect(receiver) in the finally clause; if the IndexError actually
    # fired, `receiver` was unbound and the finally raised NameError,
    # masking the real test failure.
    receiver = live_page_update.receivers[0]
    # Always disconnect so the receiver does not leak into other tests.
    live_page_update.disconnect(receiver)
| 27.130435
| 77
| 0.780449
| 80
| 624
| 5.725
| 0.4625
| 0.104803
| 0.183406
| 0.065502
| 0.262009
| 0.157205
| 0.157205
| 0
| 0
| 0
| 0
| 0.003774
| 0.150641
| 624
| 22
| 78
| 28.363636
| 0.860377
| 0.06891
| 0
| 0
| 0
| 0
| 0.103627
| 0.082902
| 0
| 0
| 0
| 0
| 0.066667
| 1
| 0.133333
| false
| 0
| 0.2
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0eb6190157c1946b37b5fd1be18f551d0e559832
| 612
|
py
|
Python
|
python/Patterns/inheritance/main.py
|
zinderud/ysa
|
e34d3f4c7afab3976d86f5d27edfcd273414e496
|
[
"Apache-2.0"
] | null | null | null |
python/Patterns/inheritance/main.py
|
zinderud/ysa
|
e34d3f4c7afab3976d86f5d27edfcd273414e496
|
[
"Apache-2.0"
] | 1
|
2017-12-27T10:09:22.000Z
|
2017-12-27T10:22:47.000Z
|
python/Patterns/inheritance/main.py
|
zinderud/ysa
|
e34d3f4c7afab3976d86f5d27edfcd273414e496
|
[
"Apache-2.0"
] | null | null | null |
class Yaratik(object):
    """Base creature ("Yaratik") providing simple movement actions."""

    def move_left(self):
        print('Moving left...')

    def move_right(self):
        # Bug fix: this previously printed 'Moving left...' (copy-paste error).
        print('Moving right...')
class Ejderha(Yaratik):
    """Dragon: inherits movement from Yaratik and adds fire breathing."""

    def Ates_puskurtme(self):
        print('ates puskurtum!')


class Zombie(Yaratik):
    """Zombie: inherits movement from Yaratik and adds biting."""

    def Isirmak(self):
        print('Isirdim simdi!')


# A plain Yaratik only has the movement behaviour.
base_creature = Yaratik()
base_creature.move_left()

# Ejderha also includes all functions from its parent class (Yaratik).
dragon = Ejderha()
dragon.move_left()
dragon.Ates_puskurtme()

# Zombie is the child class, inheriting from Yaratik (the parent class).
walker = Zombie()
walker.move_right()
walker.Isirmak()
| 18
| 74
| 0.679739
| 76
| 612
| 5.381579
| 0.394737
| 0.08802
| 0.07335
| 0.09291
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19281
| 612
| 33
| 75
| 18.545455
| 0.827935
| 0.222222
| 0
| 0.105263
| 0
| 0
| 0.120507
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0
| 0
| 0.368421
| 0.210526
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0eb71b68b065b14b8eebff52fa3bbffc15201b7a
| 1,527
|
py
|
Python
|
clustering/graph_utils.py
|
perathambkk/ml-techniques
|
5d6fd122322342c0b47dc65d09c4425fd73f2ea9
|
[
"MIT"
] | null | null | null |
clustering/graph_utils.py
|
perathambkk/ml-techniques
|
5d6fd122322342c0b47dc65d09c4425fd73f2ea9
|
[
"MIT"
] | null | null | null |
clustering/graph_utils.py
|
perathambkk/ml-techniques
|
5d6fd122322342c0b47dc65d09c4425fd73f2ea9
|
[
"MIT"
] | null | null | null |
"""
Author: Peratham Wiriyathammabhum
"""
import numpy as np
import pandas as pd
from sklearn.neighbors import NearestNeighbors
def affinity_graph(X):
    '''
    Return a dense (ni, ni) numpy array of pairwise squared L2 distances.
    '''
    n_samples = X.shape[0]
    graph = np.zeros((n_samples, n_samples))
    for row in range(n_samples):
        # Only the upper triangle is computed; the lower is mirrored.
        for col in range(row + 1, n_samples):
            sq_dist = ((X[row] - X[col]) ** 2).sum()  # squared L2 distance
            graph[row][col] = sq_dist
            graph[col][row] = sq_dist  # symmetric matrix
    return graph
def knn_graph(X, knn=4):
    '''
    Return a dense (ni, ni) numpy array holding the k-nearest-neighbour
    distance graph: A[i, j] = distance when j is among i's knn nearest
    neighbours (symmetrized), 0 elsewhere.
    '''
    ni, nd = X.shape
    # Ask for knn+1 neighbours because each point is its own nearest neighbour.
    nbrs = NearestNeighbors(n_neighbors=(knn+1), algorithm='ball_tree').fit(X)
    distances, indices = nbrs.kneighbors(X)
    A = np.zeros((ni, ni))
    for dist, ind in zip(distances, indices):
        i0 = ind[0]  # the query point itself (distance 0, nearest neighbour)
        for rank in range(1, knn+1):
            # Bug fix: the original used the neighbour's *rank* as the column
            # index (A[i0, rank]); the correct column is the neighbour's
            # sample index ind[rank] returned by kneighbors().
            j = ind[rank]
            d = dist[rank]
            A[i0, j] = d
            A[j, i0] = d  # by symmetry
    return A
def sparse_affinity_graph(X):
    '''
    TODO: This function returns a numpy sparse matrix.
    (Currently identical to affinity_graph: a dense array of pairwise
    squared L2 distances.)
    '''
    count = X.shape[0]
    result = np.zeros((count, count))
    for a in range(count):
        for b in range(a + 1, count):
            d2 = ((X[a] - X[b]) ** 2).sum()  # compute squared L2 distance
            result[a][b] = d2
            result[b][a] = d2  # by symmetry
    return result
def laplacian_graph(X, mode='affinity', knn=3, eta=0.01, sigma=2.5):
    '''
    The unnormalized graph Laplacian, L = D - W.

    mode selects how the weight matrix W is built:
      'affinity'        -- squared distances with entries above eta zeroed
      'nearestneighbor' -- k-nearest-neighbour distance graph
      'gaussian'        -- Gaussian (RBF) similarity exp(-d^2 / (2 sigma^2))
    '''
    if mode == 'affinity':
        W = affinity_graph(X)
        W[abs(W) > eta] = 0
    elif mode == 'nearestneighbor':
        W = knn_graph(X, knn=knn)
    elif mode == 'gaussian':
        W = affinity_graph(X)  # entries are squared L2 distances
        bandwidth = 2.0*(sigma**2)
        # Bug fix: the original computed np.exp(W) / bandwidth, which grows
        # explosively with distance; the Gaussian similarity decays:
        # w_ij = exp(-d_ij^2 / (2 sigma^2)).
        W = np.exp(-W / bandwidth)
    else:
        # Unknown mode: W stays undefined and the next line raises (unchanged).
        pass
    D = np.diag(W.sum(axis=1))
    L = D - W
    return L
| 21.814286
| 75
| 0.614276
| 268
| 1,527
| 3.466418
| 0.309701
| 0.04521
| 0.06028
| 0.064586
| 0.383208
| 0.334769
| 0.318622
| 0.318622
| 0.318622
| 0.318622
| 0
| 0.020781
| 0.212181
| 1,527
| 70
| 76
| 21.814286
| 0.750623
| 0.183366
| 0
| 0.4375
| 0
| 0
| 0.040067
| 0
| 0
| 0
| 0
| 0.014286
| 0
| 1
| 0.083333
| false
| 0.020833
| 0.0625
| 0
| 0.229167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ec1afd2facbda8f3febe8ca1dc7c71fb6558f04
| 1,993
|
py
|
Python
|
packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
from typing import Optional
from watchmen_auth import PrincipalService
from watchmen_data_kernel.cache import CacheService
from watchmen_data_kernel.common import DataKernelException
from watchmen_data_kernel.external_writer import find_external_writer_create, register_external_writer_creator
from watchmen_meta.common import ask_meta_storage, ask_snowflake_generator
from watchmen_meta.system import ExternalWriterService as ExternalWriterStorageService
from watchmen_model.common import ExternalWriterId
from watchmen_model.system import ExternalWriter
def register_external_writer(external_writer: ExternalWriter) -> None:
    """Register the creator for this writer's type under its writer code.

    Raises DataKernelException when no creator is known for the type.
    """
    creator = find_external_writer_create(external_writer.type)
    if creator is None:
        raise DataKernelException(f'Creator not found for external writer[{external_writer.dict()}].')
    register_external_writer_creator(external_writer.writerCode, creator())
class ExternalWriterService:
    """Looks up external writers -- cache first, then meta storage -- and
    ensures each returned writer's creator is registered."""

    def __init__(self, principal_service: PrincipalService):
        # Principal of the calling user; provides the tenant for isolation checks.
        self.principalService = principal_service

    def find_by_id(self, writer_id: ExternalWriterId) -> Optional[ExternalWriter]:
        """Return the external writer with `writer_id`, or None if absent.

        A cache hit must belong to the caller's tenant, otherwise a
        DataKernelException is raised. On a cache miss the writer is loaded
        from meta storage, cached, and registered before being returned.
        """
        external_writer = CacheService.external_writer().get(writer_id)
        if external_writer is not None:
            # Tenant isolation: a cached writer belonging to another tenant
            # is treated as an error, not a miss.
            if external_writer.tenantId != self.principalService.get_tenant_id():
                raise DataKernelException(
                    f'External writer[id={writer_id}] not belongs to '
                    f'current tenant[id={self.principalService.get_tenant_id()}].')
            register_external_writer(external_writer)
            return external_writer

        # Cache miss: fall back to meta storage.
        storage_service = ExternalWriterStorageService(
            ask_meta_storage(), ask_snowflake_generator(), self.principalService)
        storage_service.begin_transaction()
        try:
            # noinspection PyTypeChecker
            external_writer: ExternalWriter = storage_service.find_by_id(writer_id)
            if external_writer is None:
                return None
            CacheService.external_writer().put(external_writer)
            register_external_writer(external_writer)
            return external_writer
        finally:
            # Always close the transaction; no explicit commit is issued in
            # this read path.
            storage_service.close_transaction()
| 41.520833
| 110
| 0.831912
| 236
| 1,993
| 6.694915
| 0.275424
| 0.23038
| 0.06962
| 0.070886
| 0.210127
| 0.148101
| 0.070886
| 0.070886
| 0
| 0
| 0
| 0
| 0.104365
| 1,993
| 47
| 111
| 42.404255
| 0.885154
| 0.013046
| 0
| 0.105263
| 0
| 0
| 0.086514
| 0.053944
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.236842
| 0
| 0.421053
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ec2983c9be55e068e1ac3a8da9a2e78b097ece9
| 882
|
py
|
Python
|
scrywarden/module.py
|
chasebrewsky/scrywarden
|
c6a5a81d14016ca58625df68594ef52dd328a0dd
|
[
"MIT"
] | 1
|
2020-12-13T00:49:51.000Z
|
2020-12-13T00:49:51.000Z
|
scrywarden/module.py
|
chasebrewsky/scrywarden
|
c6a5a81d14016ca58625df68594ef52dd328a0dd
|
[
"MIT"
] | null | null | null |
scrywarden/module.py
|
chasebrewsky/scrywarden
|
c6a5a81d14016ca58625df68594ef52dd328a0dd
|
[
"MIT"
] | null | null | null |
from importlib import import_module
from typing import Any
def import_string(path: str) -> Any:
    """Import a dotted path name and return the class/attribute it names.

    Parameters
    ----------
    path: str
        Dotted module path to retrieve.

    Returns
    -------
    Class/attribute at the given import path.

    Raises
    ------
    ImportError
        If the path does not exist.
    """
    # Split "pkg.mod.attr" into module path and attribute name.
    try:
        mod_path, attr_name = path.rsplit('.', 1)
    except ValueError as exc:
        raise ImportError(
            f"{path} does not look like a module path",
        ) from exc

    module_obj = import_module(mod_path)

    try:
        return getattr(module_obj, attr_name)
    except AttributeError as exc:
        raise ImportError(
            f"Module '{mod_path}' does not define a '{attr_name}' "
            "attribute/class",
        ) from exc
| 24.5
| 71
| 0.603175
| 105
| 882
| 4.980952
| 0.428571
| 0.095602
| 0.063098
| 0.087954
| 0.091778
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001623
| 0.301587
| 882
| 35
| 72
| 25.2
| 0.847403
| 0.278912
| 0
| 0.352941
| 0
| 0
| 0.19105
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.352941
| 0
| 0.470588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
0ec3f460313d8f825c0daad58ff5e76ef71c5401
| 1,704
|
py
|
Python
|
Win/reg.py
|
QGB/QPSU
|
7bc214676d797f42d2d7189dc67c9377bccdf25d
|
[
"MIT"
] | 6
|
2018-03-25T20:05:21.000Z
|
2022-03-13T17:23:05.000Z
|
Win/reg.py
|
pen9un/QPSU
|
76e1a3f6f6f6f78452e02f407870a5a32177b667
|
[
"MIT"
] | 15
|
2018-05-14T03:30:21.000Z
|
2022-03-03T15:33:25.000Z
|
Win/reg.py
|
pen9un/QPSU
|
76e1a3f6f6f6f78452e02f407870a5a32177b667
|
[
"MIT"
] | 1
|
2021-07-15T06:23:45.000Z
|
2021-07-15T06:23:45.000Z
|
#coding=utf-8
# Windows registry helpers; supports both Python 2 (_winreg) and 3 (winreg).
try:
    # Inside the qgb package use a relative import; standalone, import directly.
    if __name__.startswith('qgb.Win'):
        from .. import py
    else:
        import py
except Exception as ei:
    raise ei
    # NOTE(review): unreachable -- `raise ei` above always exits this branch.
    raise EnvironmentError(__name__)
# Pick the registry module for the running interpreter and star-import its
# constants (HKEY_*, REG_*, OpenKey, ...) into this namespace.
if py.is2():
    import _winreg as winreg
    from _winreg import *
else:
    import winreg
    from winreg import *
def get(skey,name,root=HKEY_CURRENT_USER,returnType=True):
    ''' Read the value `name` from registry key `skey` under `root`.

    from qgb.Win import reg
    reg.get(r'Software\Microsoft\Windows\CurrentVersion\Internet Settings','ProxyEnable')
    reg.get(r'HKLM\SYSTEM\CurrentControlSet\Services\LanmanServer\Parameters\Size' )

    There are seven predefined root keys, traditionally named according to their constant handles defined in the Win32 API

    `skey` must NOT include the value name itself, otherwise:
    FileNotFoundError: [WinError 2] (the system cannot find the file specified).

    Returns the value, and when returnType is True also a readable
    "<REG_* name> : <code>" string describing the registry value type.
    '''
    r = OpenKey(root,skey)
    r = QueryValueEx(r,name)
    # QueryValueEx returns (value, type_code); REG_TYPE maps code -> name.
    if returnType:return r[0],'{} : {}'.format(REG_TYPE[r[1]],r[1])
    else :return r[0]
def set(skey,name,value,root=HKEY_CURRENT_USER,type='auto,or REG_TYPE int',returnType=True):
    '''Write `name`=`value` under registry key `skey` and verify by reading back.

    type : an int REG_* code, or the default sentinel string to auto-detect
           from the Python type of `value` (int->REG_DWORD(4), str->REG_SZ(1),
           bytes->REG_BINARY(3)).
    Returns a human-readable success/failure message string.
    NOTE(review): `returnType` is accepted but unused here.
    '''
    r = OpenKey(root,skey,0,KEY_SET_VALUE)
    if not py.isint(type):
        if py.isint(value):type=4
        if py.istr(value):type=1
        if py.isbyte(value):type=3 #TODO test,and add more rule
    # BUG FIX: the value name was hard-coded to 'ProxyEnable', so every call
    # wrote that value regardless of the `name` argument. Write `name` instead.
    SetValueEx(r,name,0,type,value)
    if get(skey,name,root=root,returnType=False)==value:
        return 'reg.set [{}] {}={} sucess!'.format(skey[-55:],name,value)
    else:
        return 'reg.set [{}] {}={} Failed !'.format(skey,name,value)
# Registry value-type code -> symbolic REG_* name (per the Win32 registry API).
# Used by get() to render a readable type description next to the raw code.
REG_TYPE={ 0 : 'REG_NONE',
           1 : 'REG_SZ',
           2 : 'REG_EXPAND_SZ',
           3 : 'REG_BINARY',
           4 : 'REG_DWORD',
           5 : 'REG_DWORD_BIG_ENDIAN',
           6 : 'REG_LINK',
           7 : 'REG_MULTI_SZ',
           8 : 'REG_RESOURCE_LIST',
           9 : 'REG_FULL_RESOURCE_DESCRIPTOR',
           10: 'REG_RESOURCE_REQUIREMENTS_LIST',
           11: 'REG_QWORD'}
| 29.894737
| 119
| 0.693662
| 258
| 1,704
| 4.426357
| 0.465116
| 0.014011
| 0.028021
| 0.038529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02095
| 0.159624
| 1,704
| 56
| 120
| 30.428571
| 0.776536
| 0.245892
| 0
| 0.071429
| 0
| 0
| 0.209703
| 0.045383
| 0
| 0
| 0
| 0.017857
| 0
| 1
| 0.047619
| false
| 0
| 0.142857
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ec65d0e2393fe675648f46032adc3e480a8ef52
| 1,032
|
py
|
Python
|
examples/resources.py
|
willvousden/clint
|
6dc7ab1a6a162750e968463b43994447bca32544
|
[
"0BSD"
] | 1,230
|
2015-01-03T05:39:25.000Z
|
2020-02-18T12:36:03.000Z
|
examples/resources.py
|
willvousden/clint
|
6dc7ab1a6a162750e968463b43994447bca32544
|
[
"0BSD"
] | 50
|
2015-01-06T17:58:20.000Z
|
2018-03-19T13:25:22.000Z
|
examples/resources.py
|
willvousden/clint
|
6dc7ab1a6a162750e968463b43994447bca32544
|
[
"0BSD"
] | 153
|
2015-01-03T03:56:25.000Z
|
2020-02-13T20:59:03.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Example script: exercises clint's per-user resource store
# (write, read, delete, then confirm deletion of a text file).
from __future__ import print_function
import sys
import os
# Make the in-repo copy of clint importable when run from examples/.
sys.path.insert(0, os.path.abspath('..'))
from clint import resources
resources.init('kennethreitz', 'clint')
lorem = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
print('%s created.' % resources.user.path)
resources.user.write('lorem.txt', lorem)
print('lorem.txt created')
assert resources.user.read('lorem.txt') == lorem
print('lorem.txt has correct contents')
resources.user.delete('lorem.txt')
print('lorem.txt deleted')
# Fix: compare against None with `is`, not `==` (PEP 8); the original script
# asserts read() yields None for a deleted file.
assert resources.user.read('lorem.txt') is None
print('lorem.txt deletion confirmed')
| 33.290323
| 456
| 0.767442
| 151
| 1,032
| 5.211921
| 0.662252
| 0.081321
| 0.066074
| 0.045743
| 0.134689
| 0.134689
| 0
| 0
| 0
| 0
| 0
| 0.002222
| 0.127907
| 1,032
| 30
| 457
| 34.4
| 0.872222
| 0.040698
| 0
| 0
| 0
| 0.0625
| 0.611336
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ecb9ff079e3fe67fcf620b3218ea8892b9b9c1c
| 1,726
|
py
|
Python
|
utils/utils.py
|
scomup/StereoNet-ActiveStereoNet
|
05994cf1eec4a109e095732fe01ecb5558880ba5
|
[
"MIT"
] | null | null | null |
utils/utils.py
|
scomup/StereoNet-ActiveStereoNet
|
05994cf1eec4a109e095732fe01ecb5558880ba5
|
[
"MIT"
] | null | null | null |
utils/utils.py
|
scomup/StereoNet-ActiveStereoNet
|
05994cf1eec4a109e095732fe01ecb5558880ba5
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# Copyright (c) NKU
# Licensed under the MIT License.
# Written by Xuanyi Li ([email protected])
# ------------------------------------------------------------------------------
import os
import torch
import torch.nn.functional as F
#import cv2 as cv
import numpy as np
def GERF_loss(GT, pred, args):
    """Robust disparity loss: mean of sqrt((GT - pred)^2 + 4)/2 - 1 over GT > 0.

    `args` is unused here; it is kept so every loss shares the same signature.
    The divisor is clamped to 1 when no pixel is valid, yielding a zero loss.
    """
    valid = GT > 0
    valid.detach_()
    num_valid = len(torch.nonzero(valid))
    if num_valid == 0:
        num_valid = 1
    residual = GT[valid] - pred[valid]
    per_px = torch.sqrt(torch.pow(residual, 2) + 4) / 2 - 1
    return torch.sum(per_px) / num_valid
def smooth_L1_loss(GT, pred, args):
    """Mean absolute error between pred and GT over pixels with GT < args.maxdisp."""
    below_max = GT < args.maxdisp
    below_max.detach_()
    return (pred[below_max] - GT[below_max]).abs().mean()
if __name__ == '__main__':
    # No CLI behavior; the commented-out code below is colormap/visualization
    # scratch work kept for reference (OpenCV/matplotlib experiments).
    pass
    # import matplotlib.pyplot as plt
    # image = cv.imread('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/forvideo/iter-122.jpg')
    #im_gray = cv.imread('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/forvideo/iter-133.jpg', cv.IMREAD_GRAYSCALE)
    # print(im_gray.shape)
    #im_color = cv.applyColorMap(im_gray*2, cv.COLORMAP_JET)
    # cv.imshow('test', im_color)
    # cv.waitKey(0)
    #cv.imwrite('test.png',im_color)
    # print(image.shape)
    # plt.figure('Image')
    # sc =plt.imshow(image)
    # sc.set_cmap('hsv')
    # plt.colorbar()
    # plt.axis('off')
    # plt.show()
    # print('end')
    # image[:,:,0].save('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/pretrained_StereoNet_single/it1er-151.jpg')
| 32.566038
| 128
| 0.589803
| 228
| 1,726
| 4.333333
| 0.45614
| 0.030364
| 0.036437
| 0.081984
| 0.291498
| 0.255061
| 0.255061
| 0.204453
| 0.1417
| 0.1417
| 0
| 0.018881
| 0.171495
| 1,726
| 52
| 129
| 33.192308
| 0.672028
| 0.631518
| 0
| 0.111111
| 0
| 0
| 0.013158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0.055556
| 0.222222
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0ecd026a7b7cddee19fb7d65983aadf807f4917d
| 657
|
py
|
Python
|
rblod/setup.py
|
TiKeil/Two-scale-RBLOD
|
23f17a3e4edf63ea5f208eca50ca90c19bf511a9
|
[
"BSD-2-Clause"
] | null | null | null |
rblod/setup.py
|
TiKeil/Two-scale-RBLOD
|
23f17a3e4edf63ea5f208eca50ca90c19bf511a9
|
[
"BSD-2-Clause"
] | null | null | null |
rblod/setup.py
|
TiKeil/Two-scale-RBLOD
|
23f17a3e4edf63ea5f208eca50ca90c19bf511a9
|
[
"BSD-2-Clause"
] | null | null | null |
# ~~~
# This file is part of the paper:
#
# " An Online Efficient Two-Scale Reduced Basis Approach
# for the Localized Orthogonal Decomposition "
#
# https://github.com/TiKeil/Two-scale-RBLOD.git
#
# Copyright 2019-2021 all developers. All rights reserved.
# License: Licensed as BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
# Authors:
# Stephan Rave
# Tim Keil
# ~~~
from setuptools import setup

setup(name='rblod',
      version='2021.1',
      description='Pymor support for RBLOD',
      author='Tim Keil',
      author_email='[email protected]',
      # Fix: metadata said 'MIT', contradicting the BSD 2-Clause license
      # declared in this file's header and the repository license.
      license='BSD-2-Clause',
      packages=['rblod'])
| 26.28
| 89
| 0.648402
| 83
| 657
| 5.120482
| 0.759036
| 0.049412
| 0.047059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.223744
| 657
| 24
| 90
| 27.375
| 0.803922
| 0.605784
| 0
| 0
| 0
| 0
| 0.266393
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ecdf401d5b3926e749aa892bfa6a87de7f72b30
| 8,060
|
py
|
Python
|
bin/euclid_fine_plot_job_array.py
|
ndeporzio/cosmicfish
|
f68f779d73f039512a958d110bb44194d0daceec
|
[
"MIT"
] | null | null | null |
bin/euclid_fine_plot_job_array.py
|
ndeporzio/cosmicfish
|
f68f779d73f039512a958d110bb44194d0daceec
|
[
"MIT"
] | null | null | null |
bin/euclid_fine_plot_job_array.py
|
ndeporzio/cosmicfish
|
f68f779d73f039512a958d110bb44194d0daceec
|
[
"MIT"
] | null | null | null |
import os
import shutil
import numpy as np
import pandas as pd
import seaborn as sns
import cosmicfish as cf
import matplotlib.pyplot as plt
import dill
# Fisher-forecast pipeline for a EUCLID-like survey with light relics.
# Requires env vars: STORAGE_DIR, DATASTORE_DIR, CLASS_DIR, FORECAST_INDEX.
# FORECAST_INDEX selects one (temperature, mass) grid point (job-array style).
# Instruct pyplot to use seaborn
sns.set()
# Set project, data, CLASS directories
projectdir = os.environ['STORAGE_DIR']
datastore = os.environ['DATASTORE_DIR']
classpath = os.environ['CLASS_DIR']
fidx = int(os.environ['FORECAST_INDEX'])
# Generate output paths
fp_resultsdir = projectdir
cf.makedirectory(fp_resultsdir)
# Specify resolution of numerical integrals
derivative_step = 0.008 # How much to vary parameter to calculate numerical derivative
g_derivative_step = 0.1
mu_integral_step = 0.05 # For calculating numerical integral wrt mu between -1 and 1
# Linda Fiducial Cosmology
fp_fid = {
        "A_s" : 2.2321e-9,
        "n_s" : 0.967,
        "omega_b" : 0.02226,
        "omega_cdm" : 0.1127,
        "tau_reio" : 0.0598,
        "h" : 0.701,
        "T_cmb" : 2.726, # Units [K]
        "N_ncdm" : 4.,
        "deg_ncdm" : 1.0,
        "T_ncdm" : (0.79/2.726), # Units [T_cmb].
        "m_ncdm" : 0.01, # Units [eV]
        "b0" : 1.0,
        "beta0" : 1.7,
        "beta1" : 1.0,
        "alphak2" : 1.0,
        "sigma_fog_0" : 250000, #Units [m s^-2]
        "N_eff" : 0.0064, #We allow relativistic neutrinos in addition to our DM relic
        "relic_vary" : "N_ncdm", # Fix T_ncdm or m_ncdm
        "m_nu" : 0.02
        }
# EUCLID values
# Redshift bins and galaxy counts per bin; skycover is the sky fraction.
z_table = np.array([0.65, 0.75, 0.85, 0.95, 1.05, 1.15, 1.25, 1.35, 1.45, 1.55, 1.65, 1.75, 1.85, 1.95])
dNdz = np.array([2434.280, 4364.812, 4728.559, 4825.798, 4728.797, 4507.625, 4269.851, 3720.657, 3104.309,
                2308.975, 1514.831, 1474.707, 893.716, 497.613])
skycover = 0.3636
# Run Fisher Forecast
# 21 masses x 4 temperatures; FORECAST_INDEX is decoded into one grid point.
full_masses = np.geomspace(0.01, 10., 21)
full_temps = np.array([0.79, 0.91, 0.94, 1.08])
mass_index=(fidx % 21)
temp_index=(fidx // 21)
masses = np.array([full_masses[mass_index]])
temps = np.array([full_temps[temp_index]])
# Adjust omega_cdm so total matter stays fixed as the relic contribution varies.
omegacdm_set = np.array([
    fp_fid['omega_cdm']
    - ((masses/cf.NEUTRINO_SCALE_FACTOR)* np.power(tval / 1.95, 3.))
    for tidx, tval in enumerate(temps)])
# One fiducial-cosmology dict per (temperature, mass) pair.
fp_fiducialset = [[
    dict(fp_fid, **{
        'm_ncdm' : masses[midx],
        'omega_cdm' : omegacdm_set[tidx, midx],
        'T_ncdm' : temps[tidx]/2.726})
    for midx, mval in enumerate(masses)]
    for tidx, tval in enumerate(temps)]
# One cosmicfish forecast object per fiducial cosmology.
fp_forecastset = [[cf.forecast(
    classpath,
    datastore,
    '2relic',
    fidval,
    z_table,
    "EUCLID",
    dNdz,
    fsky=skycover,
    dstep=derivative_step,
    gstep=g_derivative_step,
    RSD=True,
    FOG=True,
    AP=True,
    COV=True)
    for fididx, fidval in enumerate(fidrowvals)]
    for fidrowidx, fidrowvals in enumerate(fp_fiducialset)]
#dill.load_session('')
# NOTE(review): the inner loop variable `fidx` shadows the FORECAST_INDEX read
# from the environment above. mass_index/temp_index were computed earlier so
# results are unaffected, but the shadowing is fragile — confirm intended.
for frowidx, frowval in enumerate(fp_forecastset):
    for fidx, fcst in enumerate(frowval):
        # NOTE(review): `type(fcst.fisher)==type(None)` is `fcst.fisher is None`.
        if type(fcst.fisher)==type(None):
            fcst.gen_pm()
            fcst.gen_fisher(
                fisher_order=[
                    'omega_b',
                    'omega_cdm',
                    'n_s',
                    'A_s',
                    'tau_reio',
                    'h',
                    'N_ncdm',
                    'M_ncdm',
                    'sigma_fog',
                    'beta0',
                    'beta1',
                    'alpha_k2'],
                mu_step=mu_integral_step,
                skipgen=False)
            print("Relic Forecast ", fidx, " complete...")
            # Persist the entire session so the job array can be resumed/plotted later.
            dill.dump_session(os.path.join(fp_resultsdir, 'fp_'+str(temp_index)+'_'+str(mass_index)+'.db'))
        else:
            print('Fisher matrix already generated!')
| 65.528455
| 116
| 0.262655
| 511
| 8,060
| 3.986301
| 0.448141
| 0.037801
| 0.014728
| 0.012764
| 0.028473
| 0.028473
| 0.028473
| 0
| 0
| 0
| 0
| 0.100917
| 0.675434
| 8,060
| 122
| 117
| 66.065574
| 0.677752
| 0.121216
| 0
| 0
| 0
| 0
| 0.048801
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.079208
| 0
| 0.079208
| 0.019802
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ecedb23d891d612188b09f34a36b454a3d85a93
| 6,674
|
py
|
Python
|
src/oci/apm_traces/models/query_result_row_type_summary.py
|
Manny27nyc/oci-python-sdk
|
de60b04e07a99826254f7255e992f41772902df7
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 249
|
2017-09-11T22:06:05.000Z
|
2022-03-04T17:09:29.000Z
|
src/oci/apm_traces/models/query_result_row_type_summary.py
|
Manny27nyc/oci-python-sdk
|
de60b04e07a99826254f7255e992f41772902df7
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 228
|
2017-09-11T23:07:26.000Z
|
2022-03-23T10:58:50.000Z
|
src/oci/apm_traces/models/query_result_row_type_summary.py
|
Manny27nyc/oci-python-sdk
|
de60b04e07a99826254f7255e992f41772902df7
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 224
|
2017-09-27T07:32:43.000Z
|
2022-03-25T16:55:42.000Z
|
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class QueryResultRowTypeSummary(object):
    """
    A summary of the datatype, unit and related metadata of an individual row element of a query result row that is returned.
    """

    def __init__(self, **kwargs):
        """
        Initializes a new QueryResultRowTypeSummary object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param data_type:
            The value to assign to the data_type property of this QueryResultRowTypeSummary.
        :type data_type: str

        :param unit:
            The value to assign to the unit property of this QueryResultRowTypeSummary.
        :type unit: str

        :param display_name:
            The value to assign to the display_name property of this QueryResultRowTypeSummary.
        :type display_name: str

        :param expression:
            The value to assign to the expression property of this QueryResultRowTypeSummary.
        :type expression: str

        :param query_result_row_type_summaries:
            The value to assign to the query_result_row_type_summaries property of this QueryResultRowTypeSummary.
        :type query_result_row_type_summaries: list[oci.apm_traces.models.QueryResultRowTypeSummary]
        """
        # Type/JSON-name maps used by the SDK's (de)serialization machinery.
        self.swagger_types = {
            'data_type': 'str',
            'unit': 'str',
            'display_name': 'str',
            'expression': 'str',
            'query_result_row_type_summaries': 'list[QueryResultRowTypeSummary]'
        }

        self.attribute_map = {
            'data_type': 'dataType',
            'unit': 'unit',
            'display_name': 'displayName',
            'expression': 'expression',
            'query_result_row_type_summaries': 'queryResultRowTypeSummaries'
        }

        self._data_type = None
        self._unit = None
        self._display_name = None
        self._expression = None
        self._query_result_row_type_summaries = None

    @property
    def data_type(self):
        """
        Gets the data_type of this QueryResultRowTypeSummary.
        Datatype of the query result row element.

        :rtype: str
        """
        return self._data_type

    @data_type.setter
    def data_type(self, data_type):
        """
        Sets the data_type of this QueryResultRowTypeSummary.
        Datatype of the query result row element.

        :type: str
        """
        self._data_type = data_type

    @property
    def unit(self):
        """
        Gets the unit of this QueryResultRowTypeSummary.
        Granular unit in which the query result row element's data is represented.

        :rtype: str
        """
        return self._unit

    @unit.setter
    def unit(self, unit):
        """
        Sets the unit of this QueryResultRowTypeSummary.
        Granular unit in which the query result row element's data is represented.

        :type: str
        """
        self._unit = unit

    @property
    def display_name(self):
        """
        Gets the display_name of this QueryResultRowTypeSummary.
        Alias name if an alias is used for the query result row element or an assigned display name from the query language
        in some default cases.

        :rtype: str
        """
        return self._display_name

    @display_name.setter
    def display_name(self, display_name):
        """
        Sets the display_name of this QueryResultRowTypeSummary.
        Alias name if an alias is used for the query result row element or an assigned display name from the query language
        in some default cases.

        :type: str
        """
        self._display_name = display_name

    @property
    def expression(self):
        """
        Gets the expression of this QueryResultRowTypeSummary.
        Actual show expression in the user typed query that produced this column.

        :rtype: str
        """
        return self._expression

    @expression.setter
    def expression(self, expression):
        """
        Sets the expression of this QueryResultRowTypeSummary.
        Actual show expression in the user typed query that produced this column.

        :type: str
        """
        self._expression = expression

    @property
    def query_result_row_type_summaries(self):
        """
        Gets the query_result_row_type_summaries of this QueryResultRowTypeSummary.
        A query result row type summary object that represents a nested table structure.

        :rtype: list[oci.apm_traces.models.QueryResultRowTypeSummary]
        """
        return self._query_result_row_type_summaries

    @query_result_row_type_summaries.setter
    def query_result_row_type_summaries(self, query_result_row_type_summaries):
        """
        Sets the query_result_row_type_summaries of this QueryResultRowTypeSummary.
        A query result row type summary object that represents a nested table structure.

        :type: list[oci.apm_traces.models.QueryResultRowTypeSummary]
        """
        self._query_result_row_type_summaries = query_result_row_type_summaries

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        """Attribute-wise equality.

        Bug fix: the original compared ``other.__dict__`` after only a None
        check, so comparing against objects without a ``__dict__`` (ints,
        strings, ...) raised AttributeError. Returning NotImplemented for
        foreign types lets ``==``/``!=`` fall back to the standard protocol
        (yielding False/True), including for ``other is None``.
        """
        if not isinstance(other, QueryResultRowTypeSummary):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| 33.878173
| 245
| 0.674408
| 787
| 6,674
| 5.503177
| 0.190597
| 0.068575
| 0.087278
| 0.083122
| 0.626414
| 0.532671
| 0.414916
| 0.315862
| 0.315862
| 0.287693
| 0
| 0.003669
| 0.264909
| 6,674
| 196
| 246
| 34.05102
| 0.879128
| 0.557537
| 0
| 0.080645
| 0
| 0
| 0.102352
| 0.052265
| 0
| 0
| 0
| 0
| 0
| 1
| 0.225806
| false
| 0
| 0.032258
| 0.032258
| 0.419355
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ed3370d325b05dcd0ff4ac3d8d74980237e624c
| 1,004
|
py
|
Python
|
anti_cpdaily/command.py
|
hyx0329/nonebot_plugin_anti_cpdaily
|
5868626fb95876f9638aaa1edd9a2f914ea7bed1
|
[
"MIT"
] | 2
|
2021-11-07T10:33:16.000Z
|
2021-12-20T08:25:19.000Z
|
anti_cpdaily/command.py
|
hyx0329/nonebot_plugin_anti_cpdaily
|
5868626fb95876f9638aaa1edd9a2f914ea7bed1
|
[
"MIT"
] | null | null | null |
anti_cpdaily/command.py
|
hyx0329/nonebot_plugin_anti_cpdaily
|
5868626fb95876f9638aaa1edd9a2f914ea7bed1
|
[
"MIT"
] | null | null | null |
import nonebot
from nonebot import on_command
from nonebot.rule import to_me
from nonebot.typing import T_State
from nonebot.adapters import Bot, Event
from nonebot.log import logger
from .config import global_config
from .schedule import anti_cpdaily_check_routine
# Chat command entry point: ".cpdaily" (actual prefix depends on bot config).
cpdaily = on_command('cpdaily')
# Shared APScheduler instance provided by the nonebot_plugin_apscheduler plugin.
scheduler = nonebot.require("nonebot_plugin_apscheduler").scheduler
async def one_shot_routine():
    """Run the check-in routine once, de-registering the one-shot job first."""
    # Remove the interval job so this routine does not fire again.
    scheduler.remove_job('anti_cpdaily_oneshot')
    await anti_cpdaily_check_routine()
@cpdaily.handle()
async def handle_command(bot: Bot, event: Event, state: T_State):
    """Manually trigger the check-in routine (fires once, ~1 minute later).

    Only superusers get the routine scheduled; see NOTE below about the reply.
    """
    if event.get_user_id() in bot.config.superusers:
        logger.debug('manually activate the cpdaily routine')
        # await anti_cpdaily_check_routine()
        # One-shot scheduling: `one_shot_routine` removes this job id when it
        # fires, so the 1-minute interval trigger effectively runs once.
        scheduler.add_job(one_shot_routine, trigger='interval', minutes=1, id='anti_cpdaily_oneshot', replace_existing=True)
        logger.debug('manual process end')
    # NOTE(review): finish() replies to every caller, superuser or not — confirm intended.
    await cpdaily.finish('启动今日校园打卡程序ing')
| 32.387097
| 124
| 0.76494
| 136
| 1,004
| 5.419118
| 0.441176
| 0.074627
| 0.065129
| 0.093623
| 0.126187
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002342
| 0.149402
| 1,004
| 30
| 125
| 33.466667
| 0.860656
| 0.033865
| 0
| 0
| 0
| 0
| 0.162309
| 0.028322
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
0ed495b3d64a671dbd7202470a06b2b18d6c7be4
| 155
|
py
|
Python
|
tests/inputs/loops/51-arrays-in-loop.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | 4
|
2019-10-06T18:01:24.000Z
|
2020-07-03T05:27:35.000Z
|
tests/inputs/loops/51-arrays-in-loop.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | 5
|
2021-06-07T15:50:04.000Z
|
2021-06-07T15:50:06.000Z
|
tests/inputs/loops/51-arrays-in-loop.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | null | null | null |
import numpy as np
from something import Top  # `Top` stands in as an abstract/unknown value

i = 0
while i < 10:  # NOTE(review): static-analysis fixture (pytropos test input); line positions may matter, so comments are trailing only
    a = np.ndarray((10,4))
    b = np.ones((10, Top))  # second dimension is the abstract value Top
    i += 1

del Top

# show_store()
| 12.916667
| 26
| 0.580645
| 29
| 155
| 3.068966
| 0.689655
| 0.089888
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080357
| 0.277419
| 155
| 11
| 27
| 14.090909
| 0.714286
| 0.077419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ed9b178770e9775a60fa8ee66730cd786425565
| 448
|
py
|
Python
|
test/test_delete_group.py
|
ruslankl9/ironpython_training
|
51eaad4da24fdce60fbafee556160a9e847c08cf
|
[
"Apache-2.0"
] | null | null | null |
test/test_delete_group.py
|
ruslankl9/ironpython_training
|
51eaad4da24fdce60fbafee556160a9e847c08cf
|
[
"Apache-2.0"
] | null | null | null |
test/test_delete_group.py
|
ruslankl9/ironpython_training
|
51eaad4da24fdce60fbafee556160a9e847c08cf
|
[
"Apache-2.0"
] | null | null | null |
from model.group import Group
import random
def test_delete_some_group(app):
    """Delete a randomly chosen group and verify the list shrinks as expected."""
    groups = app.group
    # Ensure there is something to delete (and seed a group if nearly empty).
    if len(groups.get_group_list()) <= 1:
        groups.add_new_group(Group(name='test'))
    before = groups.get_group_list()
    victim = random.randrange(len(before))
    groups.delete_group_by_index(victim)
    after = groups.get_group_list()
    assert len(after) == len(before) - 1
    # The remaining groups are exactly the old list minus the deleted entry.
    expected = before[:victim] + before[victim + 1:]
    assert expected == after
| 32
| 51
| 0.712054
| 73
| 448
| 4.068493
| 0.328767
| 0.13468
| 0.111111
| 0.161616
| 0.228956
| 0.161616
| 0
| 0
| 0
| 0
| 0
| 0.005376
| 0.169643
| 448
| 14
| 52
| 32
| 0.793011
| 0
| 0
| 0
| 0
| 0
| 0.008909
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.083333
| false
| 0
| 0.166667
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0eda1b4f399b44a556364cedf6c955fb55a3872c
| 2,355
|
py
|
Python
|
src/decisionengine/framework/modules/tests/test_module_decorators.py
|
moibenko/decisionengine
|
4c458e0c225ec2ce1e82d56e752724983331b7d1
|
[
"Apache-2.0"
] | 9
|
2018-06-11T20:06:50.000Z
|
2020-10-01T17:02:02.000Z
|
src/decisionengine/framework/modules/tests/test_module_decorators.py
|
moibenko/decisionengine
|
4c458e0c225ec2ce1e82d56e752724983331b7d1
|
[
"Apache-2.0"
] | 551
|
2018-06-25T21:06:37.000Z
|
2022-03-31T13:47:32.000Z
|
src/decisionengine/framework/modules/tests/test_module_decorators.py
|
goodenou/decisionengine
|
b203e2c493cf501562accf1013c6257c348711b7
|
[
"Apache-2.0"
] | 70
|
2018-06-11T20:07:01.000Z
|
2022-02-10T16:18:24.000Z
|
# SPDX-FileCopyrightText: 2017 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
import pytest
from decisionengine.framework.modules import Publisher, Source
from decisionengine.framework.modules.Module import verify_products
from decisionengine.framework.modules.Source import Parameter
def test_multiple_consumes_declarations():
    # Applying @consumes twice to the same publisher class must be rejected.
    with pytest.raises(Exception, match="@consumes has already been called"):
        @Publisher.consumes(a=int)
        @Publisher.consumes(b=float)
        class _(Publisher.Publisher):
            pass
def test_multiple_produces_declarations():
    # Applying @produces twice to the same source class must be rejected.
    with pytest.raises(Exception, match="@produces has already been called"):
        @Source.produces(c=str)
        @Source.produces(d=bool)
        class _(Source.Source):
            pass
def test_wrong_product_names():
    # Declares product 'a' but acquire() yields 'b' -> verify_products must raise,
    # listing both the unproduced declaration and the undeclared product.
    @Source.produces(a=str)
    class BMaker(Source.Source):
        def __init__(self, config):
            super().__init__(config)
        def acquire(self):
            return {"b": ""}
    maker = BMaker({"channel_name": "test"})
    expected_err_msg = (
        "The following products were not produced:\n"
        + " - 'a' of type 'str'\n\n"
        + "The following products were not declared:\n"
        + " - 'b' of type 'str'"
    )
    with pytest.raises(Exception, match=expected_err_msg):
        verify_products(maker, maker.acquire())
def test_wrong_product_types():
    # Declared types (a: str, b: int) vs produced (int, int): 'a' mismatches.
    @Source.produces(a=str, b=int)
    class AMaker(Source.Source):
        def __init__(self, config):
            super().__init__(config)
        def acquire(self):
            return {"a": 42, "b": 17}
    maker = AMaker({"channel_name": "test"})
    expected_err_msg = "The following products have the wrong types:\n" + r" - 'a' \(expected 'str', got 'int'\)"
    with pytest.raises(Exception, match=expected_err_msg):
        verify_products(maker, maker.acquire())
def test_supports_config():
    # A parameter declared as int whose default 'hello' is a str must raise.
    expected_err_msg = (
        "An error occurred while processing the parameter 'conflicting_types':\n"
        + "The specified type 'int' conflicts with the type of the default value "
        + r"'hello' \(type 'str'\)"
    )
    with pytest.raises(Exception, match=expected_err_msg):
        @Source.supports_config(Parameter("conflicting_types", type=int, default="hello"))
        class _(Source.Source):
            pass
| 31.4
| 113
| 0.656476
| 281
| 2,355
| 5.313167
| 0.327402
| 0.044206
| 0.056263
| 0.083724
| 0.379102
| 0.35633
| 0.300067
| 0.300067
| 0.300067
| 0.234427
| 0
| 0.005467
| 0.223355
| 2,355
| 74
| 114
| 31.824324
| 0.810826
| 0.03949
| 0
| 0.339623
| 0
| 0
| 0.220452
| 0.009739
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169811
| false
| 0.056604
| 0.075472
| 0.037736
| 0.377358
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0edc64834d9ac7d861217e389cda5a4bf52a203f
| 1,129
|
py
|
Python
|
musicscore/musicxml/types/complextypes/backup.py
|
alexgorji/music_score
|
b4176da52295361f3436826903485c5cb8054c5e
|
[
"MIT"
] | 2
|
2020-06-22T13:33:28.000Z
|
2020-12-30T15:09:00.000Z
|
musicscore/musicxml/types/complextypes/backup.py
|
alexgorji/music_score
|
b4176da52295361f3436826903485c5cb8054c5e
|
[
"MIT"
] | 37
|
2020-02-18T12:15:00.000Z
|
2021-12-13T20:01:14.000Z
|
musicscore/musicxml/types/complextypes/backup.py
|
alexgorji/music_score
|
b4176da52295361f3436826903485c5cb8054c5e
|
[
"MIT"
] | null | null | null |
'''
<xs:complexType name="backup">
<xs:annotation>
<xs:documentation></xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:group ref="duration"/>
<xs:group ref="editorial"/>
</xs:sequence>
</xs:complexType>
'''
from musicscore.dtd.dtd import Sequence, GroupReference, Element
from musicscore.musicxml.groups.common import Editorial
from musicscore.musicxml.elements.note import Duration
from musicscore.musicxml.types.complextypes.complextype import ComplexType
class ComplexTypeBackup(ComplexType):
    """MusicXML ``backup`` complex type: a duration followed by the editorial group.

    The backup and forward elements coordinate multiple voices in one part,
    including music on multiple staves. Backup is generally used to move
    between voices and staves, so it carries no voice or staff elements.
    Duration values should always be positive and should not cross measure
    boundaries or mid-measure changes in the divisions value.
    """

    # Content model per the XSD: <duration> then the editorial group.
    _DTD = Sequence(
        Element(Duration),
        GroupReference(Editorial),
    )

    def __init__(self, tag, *args, **kwargs):
        super().__init__(*args, tag=tag, **kwargs)
| 35.28125
| 119
| 0.732507
| 139
| 1,129
| 5.884892
| 0.532374
| 0.06846
| 0.080685
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170948
| 1,129
| 31
| 120
| 36.419355
| 0.873932
| 0.595217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.363636
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
0ee1c3866e5f2d77866339896a7b340616b1337d
| 414
|
py
|
Python
|
Python tests/dictionaries.py
|
Johnny-QA/Python_training
|
a15de68195eb155c99731db3e4ff1d9d75681752
|
[
"Apache-2.0"
] | null | null | null |
Python tests/dictionaries.py
|
Johnny-QA/Python_training
|
a15de68195eb155c99731db3e4ff1d9d75681752
|
[
"Apache-2.0"
] | null | null | null |
Python tests/dictionaries.py
|
Johnny-QA/Python_training
|
a15de68195eb155c99731db3e4ff1d9d75681752
|
[
"Apache-2.0"
] | null | null | null |
# Training-exercise literals: a set, two dicts, and two lists of dicts.
my_set = {1, 3, 5}
my_dict = {'name': 'Jose', 'age': 90}
another_dict = {1: 15, 2: 75, 3: 150}
# Each player record: a name plus a tuple of picked lottery numbers.
lottery_players = [
    {
        'name': 'Rolf',
        'numbers': (13, 45, 66, 23, 22)
    },
    {
        'name': 'John',
        'numbers': (14, 56, 80, 23, 22)
    }
]
# Each university record: a name and a country code.
universities = [
    {
        'name': 'Oxford',
        'location': 'UK'
    },
    {
        'name': 'MIT',
        'location': 'US'
    }
]
| 16.56
| 39
| 0.398551
| 46
| 414
| 3.5
| 0.73913
| 0.049689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135659
| 0.376812
| 414
| 25
| 40
| 16.56
| 0.488372
| 0
| 0
| 0
| 0
| 0
| 0.187952
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ee4cfc2dd5204b72c6c610aac6abe376e79a7c9
| 3,765
|
py
|
Python
|
3-functions/pytest-exercises/test_functions.py
|
BaseCampCoding/python-fundamentals
|
3804c07841d6604b1e5a1c15126b3301aa8ae306
|
[
"MIT"
] | null | null | null |
3-functions/pytest-exercises/test_functions.py
|
BaseCampCoding/python-fundamentals
|
3804c07841d6604b1e5a1c15126b3301aa8ae306
|
[
"MIT"
] | 1
|
2018-07-18T18:01:22.000Z
|
2019-06-14T15:06:28.000Z
|
3-functions/pytest-exercises/test_functions.py
|
BaseCampCoding/python-fundamentals
|
3804c07841d6604b1e5a1c15126b3301aa8ae306
|
[
"MIT"
] | null | null | null |
import functions
from pytest import approx
from bcca.test import should_print
def test_add_em_up():
    # add_em_up sums its three arguments.
    assert functions.add_em_up(1, 2, 3) == 6
    assert functions.add_em_up(4, 5, 6) == 15
def test_sub_sub_hubbub():
    # sub_sub_hubbub subtracts the later arguments from the first.
    assert functions.sub_sub_hubbub(1, 2, 3) == -4
def test_square_area():
    # square_area multiplies width by height.
    assert functions.square_area(5, 5) == 25
    assert functions.square_area(3, 5) == 15
    assert functions.square_area(2, 2) == 4
def test_circle_area():
    # circle_area uses pi ~= 3.14 (approx comparison for floats).
    assert functions.circle_area(1) == approx(3.14)
    assert functions.circle_area(5) == approx(78.5)
def test_kilometers_to_miles():
    # Conversion factor is 0.6214 miles per kilometer.
    assert functions.kilometers_to_miles(1) == approx(0.6214)
    assert functions.kilometers_to_miles(.5) == approx(0.3107)
    assert functions.kilometers_to_miles(0) == approx(0.0)
    assert functions.kilometers_to_miles(40) == approx(24.855999999999998)
@should_print
def test_sales_tax_1(output):
    # sales_tax prints a 4% state + 2% county tax breakdown (captured via should_print).
    functions.sales_tax(1)
    assert output == """
Purchase Amount: 1
State Sales Tax: 0.04
County Sales Tax: 0.02
Total Sales Tax: 0.06
Total Cost: 1.06
"""
@should_print
def test_sales_tax_99_99(output):
    # Expected text includes raw float artifacts (e.g. 105.98939999999999).
    functions.sales_tax(99.99)
    assert output == """
Purchase Amount: 99.99
State Sales Tax: 3.9996
County Sales Tax: 1.9998
Total Sales Tax: 5.9994
Total Cost: 105.98939999999999
"""
@should_print
def test_sales_tax_5_95(output):
    # Expected text includes raw float artifacts from binary floating point.
    functions.sales_tax(5.95)
    assert output == """
Purchase Amount: 5.95
State Sales Tax: 0.23800000000000002
County Sales Tax: 0.11900000000000001
Total Sales Tax: 0.35700000000000004
Total Cost: 6.307
"""
def test_min_insurance():
    # min_insurance is 80% of the replacement value (negative input passes through the math).
    assert functions.min_insurance(100000) == approx(80000.0)
    assert functions.min_insurance(123456789) == approx(98765431.2)
    assert functions.min_insurance(0) == approx(0.0)
    assert functions.min_insurance(-54317890) == approx(-43454312.0)
@should_print
def test_property_tax_10000(output):
functions.property_tax(10000)
assert output == '''
Assessment Value: 6000.0
Property Tax: 38.4
'''
@should_print
def test_property_tax_99999_95(output):
functions.property_tax(99999.95)
assert output == '''
Assessment Value: 59999.969999999994
Property Tax: 383.999808
'''
def test_bmi():
assert functions.bmi(160, 67) == approx(25.05680552)
assert functions.bmi(200, 72) == approx(27.12191358)
assert functions.bmi(120, 60) == approx(23.43333333)
def test_calories():
assert functions.calories(5, 20) == 125
assert functions.calories(1, 1) == 13
def test_earnings():
assert functions.earnings(100, 100, 100) == 3600
assert functions.earnings(50, 75, 100) == 2550
assert functions.earnings(0, 1000, 79) == 12711
@should_print
def test_paint_job_estimator(output):
    # args look like (wall area, price per gallon): paint cost below is
    # gallons * 10 -- captured stdout asserted with raw float reprs
    functions.paint_job_estimator(50, 10)
    assert output == '''
Gallons of paint required: 0.43478260869565216
Hours of labor required: 3.4782608695652173
Cost of paint: 4.3478260869565215
Cost of labor: 69.56521739130434
Total Cost: 73.91304347826086
'''


@should_print
def test_paint_job_estimator_2(output):
    functions.paint_job_estimator(750, 15.95)
    assert output == '''
Gallons of paint required: 6.521739130434782
Hours of labor required: 52.17391304347826
Cost of paint: 104.02173913043477
Cost of labor: 1043.4782608695652
Total Cost: 1147.5
'''


@should_print
def test_monthly_sales_tax(output):
    # state tax 4% and county tax 2% of monthly sales
    functions.monthly_sales_tax(123456.79)
    assert output == '''
Monthly sales: 123456.79
State sales tax: 4938.2716
County sales tax: 2469.1358
Total sales tax: 7407.4074
'''


@should_print
def test_monthly_sales_tax_2(output):
    functions.monthly_sales_tax(4321567.21)
    assert output == '''
Monthly sales: 4321567.21
State sales tax: 172862.6884
County sales tax: 86431.3442
Total sales tax: 259294.03260000004
'''
| 22.957317
| 74
| 0.733068
| 554
| 3,765
| 4.808664
| 0.263538
| 0.075075
| 0.052553
| 0.067568
| 0.266141
| 0.145646
| 0.051051
| 0
| 0
| 0
| 0
| 0.204895
| 0.153519
| 3,765
| 163
| 75
| 23.09816
| 0.631001
| 0
| 0
| 0.228814
| 0
| 0
| 0.288712
| 0
| 0
| 0
| 0
| 0
| 0.279661
| 1
| 0.152542
| false
| 0
| 0.025424
| 0
| 0.177966
| 0.084746
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0eeba77c6034df540d6e02d1c1935e84c28bdcd9
| 10,427
|
py
|
Python
|
tools/archive/create_loadable_configs.py
|
madelinemccombe/iron-skillet
|
f7bb805ac5ed0f2b44e4b438f8c021eaf2f5c66b
|
[
"MIT"
] | null | null | null |
tools/archive/create_loadable_configs.py
|
madelinemccombe/iron-skillet
|
f7bb805ac5ed0f2b44e4b438f8c021eaf2f5c66b
|
[
"MIT"
] | null | null | null |
tools/archive/create_loadable_configs.py
|
madelinemccombe/iron-skillet
|
f7bb805ac5ed0f2b44e4b438f8c021eaf2f5c66b
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2018, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Author: Scott Shoaf <[email protected]>
'''
Palo Alto Networks create_loadable_configs.py
Provides rendering of configuration templates with user defined values
Output is a set of loadable full configurations and set commands for Panos and Panorama
Edit the config_variables.yaml values and then run the script
This software is provided without support, warranty, or guarantee.
Use at your own risk.
'''
import datetime
import os
import shutil
import sys
import time
import getpass
import oyaml
from jinja2 import Environment, FileSystemLoader
from passlib.hash import des_crypt
from passlib.hash import md5_crypt
from passlib.hash import sha256_crypt
from passlib.hash import sha512_crypt
defined_filters = ['md5_hash', 'des_hash', 'sha512_hash']
def myconfig_newdir(myconfigdir_name, foldertime):
    '''
    create a new main loadable_configs folder if required then new subdirectories for configs
    :param myconfigdir_name: prefix folder name from the my_variables.py file
    :param foldertime: datetime when script run; to be used as suffix of folder name
    :return: the myconfigdir full path name
    '''
    # get the full path to the config directory we want (panos / panorama)
    myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs'))
    if not os.path.isdir(myconfigpath):
        os.mkdir(myconfigpath, mode=0o755)
        print('created new loadable config directory')
    # check that configs folder exists and if not create a new one
    # then create snippets and full sub-directories
    myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime)
    if not os.path.isdir(myconfigdir):
        os.mkdir(myconfigdir, mode=0o755)
        print('\ncreated new archive folder {0}-{1}'.format(myconfigdir_name, foldertime))
    # NOTE(review): config_type is a module-level global set by the loop in
    # __main__, not a parameter -- this function must run in that context
    if not os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)):
        # mode added for consistency with the two mkdir calls above
        os.mkdir('{0}/{1}'.format(myconfigdir, config_type), mode=0o755)
        print('created new subdirectories for {0}'.format(config_type))
    return myconfigdir
def create_context(config_var_file):
    '''
    read the yaml variables file and flatten it into a jinja render context
    :param config_var_file: path to the yaml metadata/variables file
    :return: dict mapping variable name -> value
    '''
    # read the metafile to get variables and values
    try:
        with open(config_var_file, 'r') as var_metadata:
            variables = oyaml.safe_load(var_metadata.read())
    except IOError as ioe:
        print(f'Could not open metadata file {config_var_file}')
        print(ioe)
        # exit non-zero so a calling shell can detect the failure
        sys.exit(1)
    # grab the metadata values and convert to key-based dictionary
    return {snippet_var['name']: snippet_var['value']
            for snippet_var in variables['variables']}
def template_render(filename, template_path, render_type, context):
    '''
    render a jinja template using the context values from config_variables.yaml
    :param filename: name of the template file
    :param template_path: path for the template file
    :param render_type: type if full or set commands; aligns with folder name
    :param context: dict of variables to render
    :return: the rendered configuration text
    '''
    print('..creating template for {0}'.format(filename))
    template_dir = '{0}/{1}'.format(template_path, render_type)
    env = Environment(loader=FileSystemLoader(template_dir))
    # register the custom password-hash filters defined in this module
    for filter_name, filter_fn in (('md5_hash', md5_hash),
                                   ('des_hash', des_hash),
                                   ('sha512_hash', sha512_hash)):
        env.filters[filter_name] = filter_fn
    return env.get_template(filename).render(context)
def template_save(snippet_name, myconfigdir, config_type, element):
    '''
    after rendering the template save to the myconfig directory
    each run saves with a unique prefix name + datetime
    :param snippet_name: name of the output file
    :param myconfigdir: path to the my_config directory
    :param config_type: based on initial run list; eg. panos or panorama
    :param element: rendered configuration text to write to the output file
    :return: None
    '''
    print('..saving template for {0}'.format(snippet_name))
    out_path = '{0}/{1}/{2}'.format(myconfigdir, config_type, snippet_name)
    with open(out_path, 'w') as configfile:
        configfile.write(element)
    # copy the variables file used for the render into the my_template folder
    var_file = 'loadable_config_vars/config_variables.yaml'
    if not os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)):
        # NOTE(review): assumes the loadable_config_vars subdir already
        # exists under myconfigdir -- shutil.copy will not create it
        shutil.copy(var_file, '{0}/{1}'.format(myconfigdir, var_file))
# define functions for custom jinja filters
# NOTE(review): only md5_hash, des_hash and sha512_hash are registered as
# jinja filters (see defined_filters and template_render); sha256_hash is
# defined here but currently unused by the render pipeline.
def md5_hash(txt):
    '''
    Returns the MD5 Hashed secret for use as a password hash in the PanOS configuration
    :param txt: text to be hashed
    :return: password hash of the string with salt and configuration information. Suitable to place in the phash field
    in the configurations
    '''
    return md5_crypt.hash(txt)


def des_hash(txt):
    '''
    Returns the DES Hashed secret for use as a password hash in the PanOS configuration
    :param txt: text to be hashed
    :return: password hash of the string with salt and configuration information. Suitable to place in the phash field
    in the configurations
    '''
    return des_crypt.hash(txt)


def sha256_hash(txt):
    '''
    Returns the SHA256 Hashed secret for use as a password hash in the PanOS configuration
    :param txt: text to be hashed
    :return: password hash of the string with salt and configuration information. Suitable to place in the
    phash field in the configurations
    '''
    return sha256_crypt.hash(txt)


def sha512_hash(txt):
    '''
    Returns the SHA512 Hashed secret for use as a password hash in the PanOS configuration
    :param txt: text to be hashed
    :return: password hash of the string with salt and configuration information. Suitable to place in the
    phash field in the configurations
    '''
    return sha512_crypt.hash(txt)
def replace_variables(config_type, render_type, input_var):
    '''
    get the input variables and render the output configs with jinja2
    inputs are read from the template directory and output to my_config
    :param config_type: panos or panorama to read/write to the respective directories
    :param render_type: 'full' for xml output or 'set_commands' for set cli output
    :param input_var: dict of user-supplied values (output_dir, archive_time, credentials)
    :return: None
    '''
    config_variables = 'config_variables.yaml'
    # create dict of values for the jinja template render
    context = create_context(config_variables)
    # update context dict with variables from user input
    context.update(input_var)
    # get the full path to the output directory we want (panos / panorama)
    template_path = os.path.abspath(os.path.join('..',
                                                 'templates', config_type))
    # append to the sys path for module lookup
    sys.path.append(template_path)
    # output subdir located in loadable_configs dir
    myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time'])
    # render full and set conf files
    print('\nworking with {0} config template'.format(render_type))
    if render_type == 'full':
        filename = 'iron_skillet_{0}_full.xml'.format(config_type)
    elif render_type == 'set_commands':
        filename = 'iron_skillet_{0}_full.conf'.format(config_type)
    else:
        # previously an unknown render_type left `filename` unbound and
        # raised NameError below; fail with a clear message instead
        raise ValueError('unknown render_type: {0}'.format(render_type))
    element = template_render(filename, template_path, render_type, context)
    template_save(filename, myconfig_path, config_type, element)
    print('\nconfigs have been created and can be found in {0}'.format(myconfig_path))
    print('along with the metadata values used to render the configs\n')
if __name__ == '__main__':
    # Interactive entry point: collect values, then render every
    # config_type x render_type combination.
    print('=' * 80)
    print(' ')
    print('Welcome to Iron-Skillet'.center(80))
    print(' ')
    print('=' * 80)
    input_var = {}
    # archive_time used as part of the my_config directory name
    input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S')
    print('\ndatetime used for folder creation: {0}\n'.format(input_var['archive_time']))
    # this prompts for the prefix name of the output directory
    input_var['output_dir'] = input('Enter the name of the output directory: ')
    # this prompts for the superuser username to be added into the configuration; no default admin/admin used
    input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser administrator account username: ')
    print('\na phash will be created for superuser {0} and added to the config file\n'.format(
        input_var['ADMINISTRATOR_USERNAME']))
    passwordmatch = False
    # prompt for the superuser password to create a phash and store in the my_config files; no default admin/admin
    # loop until the two getpass entries agree
    while passwordmatch is False:
        password1 = getpass.getpass("Enter the superuser administrator account password: ")
        password2 = getpass.getpass("Enter password again to verify: ")
        if password1 == password2:
            input_var['ADMINISTRATOR_PASSWORD'] = password1
            passwordmatch = True
        else:
            print('\nPasswords do not match. Please try again.\n')
    # loop through all config types that have their respective template folders
    # NOTE: replace_variables / myconfig_newdir read config_type as a global
    for config_type in ['panos', 'panorama']:
        for render_type in ['full', 'set_commands']:
            replace_variables(config_type, render_type, input_var)
| 38.762082
| 118
| 0.720629
| 1,456
| 10,427
| 5.052198
| 0.225962
| 0.019032
| 0.006525
| 0.012915
| 0.216966
| 0.169657
| 0.143284
| 0.132953
| 0.107123
| 0.107123
| 0
| 0.011043
| 0.201017
| 10,427
| 269
| 119
| 38.762082
| 0.871924
| 0.458905
| 0
| 0.054545
| 0
| 0
| 0.219135
| 0.03357
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081818
| false
| 0.118182
| 0.109091
| 0
| 0.272727
| 0.163636
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0eed163a13b8bf28c8e3cc3018df9acf80f8ef9a
| 199
|
py
|
Python
|
app/apis/__init__.py
|
FabienArcellier/blueprint-webapp-flask-restx
|
84bc9dbe697c4b0f6667d2a2d8144a3f934a307a
|
[
"MIT"
] | null | null | null |
app/apis/__init__.py
|
FabienArcellier/blueprint-webapp-flask-restx
|
84bc9dbe697c4b0f6667d2a2d8144a3f934a307a
|
[
"MIT"
] | null | null | null |
app/apis/__init__.py
|
FabienArcellier/blueprint-webapp-flask-restx
|
84bc9dbe697c4b0f6667d2a2d8144a3f934a307a
|
[
"MIT"
] | null | null | null |
from flask_restx import Api
from app.apis.hello import api as hello

# Root flask-restx Api: endpoints are mounted under /api and the
# Swagger UI is served at the same /api path.
api = Api(
    title='api',
    version='1.0',
    description='',
    prefix='/api',
    doc='/api'
)

# Register the hello namespace on the root API.
api.add_namespace(hello)
| 14.214286
| 39
| 0.633166
| 29
| 199
| 4.275862
| 0.62069
| 0.145161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012821
| 0.21608
| 199
| 13
| 40
| 15.307692
| 0.782051
| 0
| 0
| 0
| 0
| 0
| 0.070352
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0eed507c0a7d908570e5345420f87553a7bbdb5d
| 788
|
py
|
Python
|
main.py
|
poltavski/social-network-frontend
|
ccc3410e23e42cfc65efd811aba262ec88163481
|
[
"MIT"
] | null | null | null |
main.py
|
poltavski/social-network-frontend
|
ccc3410e23e42cfc65efd811aba262ec88163481
|
[
"MIT"
] | null | null | null |
main.py
|
poltavski/social-network-frontend
|
ccc3410e23e42cfc65efd811aba262ec88163481
|
[
"MIT"
] | null | null | null |
from fastapi import FastAPI, Request, Response
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from utils import get_page_data, process_initial
import uvicorn
app = FastAPI()
# Jinja2 HTML templates directory used by the page-rendering helpers.
templates = Jinja2Templates(directory="templates")
# Serve files from ./static at the /static URL path.
app.mount("/static", StaticFiles(directory="static"), name="static")
@app.get("/", response_class=HTMLResponse)
async def home(request: Request):
# Expect requests with cookies
return process_initial(request)
@app.get("/page", response_class=HTMLResponse)
async def home(request: Request):
# Expect requests with cookies
return get_page_data(request)
if __name__ == "__main__":
uvicorn.run("main:app", host="127.0.0.1", port=8050, log_level="info")
| 29.185185
| 74
| 0.769036
| 100
| 788
| 5.89
| 0.44
| 0.074703
| 0.037351
| 0.101868
| 0.278438
| 0.278438
| 0.278438
| 0.278438
| 0.278438
| 0.278438
| 0
| 0.017316
| 0.120558
| 788
| 26
| 75
| 30.307692
| 0.832612
| 0.072335
| 0
| 0.117647
| 0
| 0
| 0.086538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.352941
| 0
| 0.470588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
0eed571d3bbd262c5cff9905eccfdacc18b2c6bf
| 4,515
|
py
|
Python
|
Core/Python/create_static_group.py
|
Ku-Al/OpenManage-Enterprise
|
5cc67435d7cedb091edb07311ed9dceeda43277f
|
[
"Apache-2.0"
] | null | null | null |
Core/Python/create_static_group.py
|
Ku-Al/OpenManage-Enterprise
|
5cc67435d7cedb091edb07311ed9dceeda43277f
|
[
"Apache-2.0"
] | null | null | null |
Core/Python/create_static_group.py
|
Ku-Al/OpenManage-Enterprise
|
5cc67435d7cedb091edb07311ed9dceeda43277f
|
[
"Apache-2.0"
] | null | null | null |
#
# Python script using OME API to create a new static group
#
# _author_ = Raajeev Kalyanaraman <[email protected]>
# _version_ = 0.1
#
# Copyright (c) 2020 Dell EMC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
SYNOPSIS:
Script to create a new static group
DESCRIPTION:
This script exercises the OME REST API to create a new static
group. The user is responsible for adding devices to the
group once the group has been successfully created.
For authentication X-Auth is used over Basic Authentication
Note that the credentials entered are not stored to disk.
EXAMPLE:
python create_static_group.py --ip <xx> --user <username>
--password <pwd> --groupname "Random Test Group"
"""
import json
import argparse
from argparse import RawTextHelpFormatter
import urllib3
import requests
def create_static_group(ip_address, user_name, password, group_name):
    """ Authenticate with OME, locate the 'Static Groups' parent group
    and create a new static group under it.

    :param ip_address: OME appliance IP
    :param user_name: OME account user name
    :param password: OME account password
    :param group_name: name for the new static group
    """
    try:
        session_url = 'https://%s/api/SessionService/Sessions' % ip_address
        group_url = "https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'" % ip_address
        headers = {'content-type': 'application/json'}
        user_details = {'UserName': user_name,
                        'Password': password,
                        'SessionType': 'API'}
        # NOTE(review): verify=False disables TLS certificate checking on
        # every request below (typical for self-signed appliance certs)
        session_info = requests.post(session_url, verify=False,
                                     data=json.dumps(user_details),
                                     headers=headers)
        # guard clauses replace the original deeply nested if/else tree;
        # all failure messages are unchanged
        if session_info.status_code != 201:
            print("Unable to create a session with appliance %s" % ip_address)
            return
        headers['X-Auth-Token'] = session_info.headers['X-Auth-Token']
        response = requests.get(group_url, headers=headers, verify=False)
        if response.status_code != 200:
            print("Unable to retrieve group list from %s" % ip_address)
            return
        json_data = response.json()
        if json_data['@odata.count'] <= 0:
            # no 'Static Groups' parent found; original code was silent here
            return
        # Technically there should be only one result in the filter
        group_id = json_data['value'][0]['Id']
        group_payload = {"GroupModel": {
            "Name": group_name,
            "Description": "",
            "MembershipTypeId": 12,
            "ParentId": int(group_id)}
        }
        create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address
        create_resp = requests.post(create_url, headers=headers,
                                    verify=False,
                                    data=json.dumps(group_payload))
        if create_resp.status_code == 200:
            print("New group created : ID =", create_resp.text)
        elif create_resp.status_code == 400:
            print("Failed group creation ...See error info below")
            print(json.dumps(create_resp.json(), indent=4,
                             sort_keys=False))
    except Exception as error:
        # top-level boundary: report and swallow, as in the original CLI tool
        print("Unexpected error:", str(error))
if __name__ == '__main__':
    # suppress the warning that verify=False would otherwise print per request
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter)
    parser.add_argument("--ip", "-i", required=True, help="OME Appliance IP")
    parser.add_argument("--user", "-u", required=False,
                        help="Username for OME Appliance", default="admin")
    parser.add_argument("--password", "-p", required=True,
                        help="Password for OME Appliance")
    parser.add_argument("--groupname", "-g", required=True,
                        help="A valid name for the group")
    args = parser.parse_args()
    create_static_group(args.ip, args.user, args.password, args.groupname)
| 44.70297
| 108
| 0.61041
| 519
| 4,515
| 5.181118
| 0.425819
| 0.024544
| 0.013388
| 0.013388
| 0.080327
| 0.027891
| 0.019338
| 0
| 0
| 0
| 0
| 0.009416
| 0.294352
| 4,515
| 100
| 109
| 45.15
| 0.834589
| 0.287265
| 0
| 0.035088
| 0
| 0
| 0.196922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017544
| false
| 0.087719
| 0.087719
| 0
| 0.105263
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0ef391d627e7c29662611237b93dc0cbb0bb55b3
| 1,600
|
py
|
Python
|
tests/nls_smoother_test.py
|
sisl/CEEM
|
6154587fe3cdb92e8b7f70eedb1262caa1553cc8
|
[
"MIT"
] | 5
|
2020-06-21T16:50:42.000Z
|
2021-03-14T04:02:01.000Z
|
tests/nls_smoother_test.py
|
sisl/CEEM
|
6154587fe3cdb92e8b7f70eedb1262caa1553cc8
|
[
"MIT"
] | 1
|
2021-03-13T07:46:36.000Z
|
2021-03-16T05:14:47.000Z
|
tests/nls_smoother_test.py
|
sisl/CEEM
|
6154587fe3cdb92e8b7f70eedb1262caa1553cc8
|
[
"MIT"
] | 1
|
2021-03-30T12:08:20.000Z
|
2021-03-30T12:08:20.000Z
|
import torch
from ceem.opt_criteria import *
from ceem.systems import LorenzAttractor
from ceem.dynamics import *
from ceem.smoother import *
from ceem import utils
def test_smoother():
    # deterministic run: fixed seed and float64 throughout
    utils.set_rng_seed(1)
    torch.set_default_dtype(torch.float64)
    # Lorenz attractor parameters (classic sigma/rho/beta values)
    sigma = torch.tensor([10.])
    rho = torch.tensor([28.])
    beta = torch.tensor([8. / 3.])
    # random 2x3 linear observation matrix: 2 observed channels of 3 states
    C = torch.randn(2, 3)
    dt = 0.04
    sys = LorenzAttractor(sigma, rho, beta, C, dt, method='midpoint')
    B = 1    # batch size
    T = 200  # trajectory length
    # roll out a T-step trajectory from a random initial state
    xs = [torch.randn(B, 1, 3)]
    for t in range(T - 1):
        xs.append(sys.step(torch.tensor([0.] * B), xs[-1]))
    x = torch.cat(xs, dim=1).detach()
    x.requires_grad = True
    # noiseless observations of the rollout
    y = sys.observe(0., x).detach()
    # y += torch.rand_like(y) * 0.01
    t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype())
    x0 = torch.zeros_like(x)
    obscrit = GaussianObservationCriterion(torch.ones(2), t, y)
    dyncrit = GaussianDynamicsCriterion(torch.ones(3), t)
    # Test GroupSOSCriterion
    crit = GroupSOSCriterion([obscrit, dyncrit])
    xsm, metrics = NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.})
    # with noiseless observations the smoother should recover x exactly
    err = float((xsm - x).norm())
    assert err < 1e-8, 'Smoothing Error: %.3e' % err
    print('Passed.')
    # Test BlockSparseGroupSOSCriterion
    crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit])
    xsm, metrics = NLSsmoother(torch.zeros_like(x), crit, sys)
    err = float((xsm - x).norm())
    assert err < 1e-8, 'Smoothing Error: %.3e' % err
    print('Passed.')
if __name__ == '__main__':
    # allow running this test file directly, outside of pytest
    test_smoother()
| 23.880597
| 89
| 0.6325
| 219
| 1,600
| 4.515982
| 0.420091
| 0.040445
| 0.042467
| 0.030334
| 0.188069
| 0.11729
| 0.11729
| 0.11729
| 0.11729
| 0.11729
| 0
| 0.031721
| 0.211875
| 1,600
| 66
| 90
| 24.242424
| 0.752577
| 0.054375
| 0
| 0.153846
| 0
| 0
| 0.056329
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 1
| 0.025641
| false
| 0.051282
| 0.153846
| 0
| 0.179487
| 0.051282
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
160368ea260cbc50567d2f17656bb9f30dc2af47
| 3,494
|
py
|
Python
|
pydbhub/httphub.py
|
sum3105/pydbhub
|
501ea2c0ec7785bc06a38961a1366c3c04d7fabd
|
[
"MIT"
] | 18
|
2021-06-03T14:27:55.000Z
|
2022-02-25T17:55:33.000Z
|
pydbhub/httphub.py
|
sum3105/pydbhub
|
501ea2c0ec7785bc06a38961a1366c3c04d7fabd
|
[
"MIT"
] | 3
|
2021-06-20T07:17:51.000Z
|
2021-12-10T15:24:19.000Z
|
pydbhub/httphub.py
|
sum3105/pydbhub
|
501ea2c0ec7785bc06a38961a1366c3c04d7fabd
|
[
"MIT"
] | 5
|
2021-06-29T09:50:40.000Z
|
2021-12-31T12:10:57.000Z
|
import pydbhub
from typing import Any, Dict, List, Tuple
from json.decoder import JSONDecodeError
import requests
import io
def send_request_json(query_url: str, data: Dict[str, Any]) -> Tuple[List[Any], str]:
    """
    send_request_json sends a request to DBHub.io, formatting the returned result as JSON
    Parameters
    ----------
    query_url : str
        url of the API endpoint
    data : Dict[str, Any]
        data to be processed to the server.
    Returns
    -------
    Tuple[List[Any], str]
        The returned data is
        - a list of JSON object.
        - a string describe error if occurs
    """
    try:
        headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}
        response = requests.post(query_url, data=data, headers=headers)
        response.raise_for_status()
        return response.json(), None
    except JSONDecodeError as e:
        return None, e.args[0]
    except TypeError as e:
        return None, e.args[0]
    except requests.exceptions.HTTPError as e:
        try:
            # an HTTP error response may still carry a JSON error body
            return response.json(), e.args[0]
        except JSONDecodeError:
            return None, e.args[0]
    except requests.exceptions.RequestException as e:
        # BUG FIX: e.args is a tuple -- the original `e.args(0)` called it
        # and raised TypeError instead of reporting the transport error.
        # Assumes args[0] is the wrapped cause exception (requests nesting).
        cause = e.args[0]
        return None, str(cause.args[0])
def send_request(query_url: str, data: Dict[str, Any]) -> Tuple[List[bytes], str]:
    """
    send_request sends a request to DBHub.io.
    Parameters
    ----------
    query_url : str
        url of the API endpoint
    data : Dict[str, Any]
        data to be processed to the server.
    Returns
    -------
    List[bytes]
        database file is returned as a list of bytes
    """
    try:
        headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}
        response = requests.post(query_url, data=data, headers=headers)
        response.raise_for_status()
        return response.content, None
    except requests.exceptions.HTTPError as e:
        return None, e.args[0]
    except requests.exceptions.RequestException as e:
        # BUG FIX: e.args is a tuple -- the original `e.args(0)` called it
        # and raised TypeError instead of reporting the transport error.
        cause = e.args[0]
        return None, str(cause.args[0])
def send_upload(query_url: str, data: Dict[str, Any], db_bytes: io.BufferedReader) -> Tuple[List[Any], str]:
    """
    send_upload uploads a database to DBHub.io.
    Parameters
    ----------
    query_url : str
        url of the API endpoint.
    data : Dict[str, Any]
        data to be processed to the server.
    db_bytes : io.BufferedReader
        A buffered binary stream of the database file.
    Returns
    -------
    Tuple[List[Any], str]
        The returned data is
        - a list of JSON object.
        - a string describe error if occurs
    """
    try:
        headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}
        files = {"file": db_bytes}
        response = requests.post(query_url, data=data, headers=headers, files=files)
        response.raise_for_status()
        if response.status_code != 201:
            # The returned status code indicates something went wrong
            # (raise_for_status only rejects 4xx/5xx, so e.g. 200 lands here)
            try:
                return response.json(), str(response.status_code)
            except JSONDecodeError:
                return None, str(response.status_code)
        return response.json(), None
    except requests.exceptions.HTTPError as e:
        try:
            return response.json(), e.args[0]
        except JSONDecodeError:
            return None, e.args[0]
    except requests.exceptions.RequestException as e:
        # BUG FIX: e.args is a tuple -- the original `e.args(0)` called it
        # and raised TypeError instead of reporting the transport error.
        cause = e.args[0]
        return None, str(cause.args[0])
| 30.649123
| 108
| 0.61763
| 450
| 3,494
| 4.704444
| 0.188889
| 0.030704
| 0.028342
| 0.039679
| 0.720359
| 0.691545
| 0.675012
| 0.643836
| 0.632026
| 0.576287
| 0
| 0.006339
| 0.277619
| 3,494
| 113
| 109
| 30.920354
| 0.832409
| 0.296508
| 0
| 0.703704
| 0
| 0
| 0.054577
| 0.029049
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.092593
| 0
| 0.425926
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1608a15c941a14be0a253388b661310efd0d4787
| 2,834
|
py
|
Python
|
MultirangerTest.py
|
StuartLiam/DroneNavigationOnboard
|
11ac6a301dfc72b15e337ddf09f5ddc79265a03f
|
[
"MIT"
] | null | null | null |
MultirangerTest.py
|
StuartLiam/DroneNavigationOnboard
|
11ac6a301dfc72b15e337ddf09f5ddc79265a03f
|
[
"MIT"
] | null | null | null |
MultirangerTest.py
|
StuartLiam/DroneNavigationOnboard
|
11ac6a301dfc72b15e337ddf09f5ddc79265a03f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2017 Bitcraze AB
#
# Crazyflie Python Library
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Example scipts that allows a user to "push" the Crazyflie 2.0 around
using your hands while it's hovering.
This examples uses the Flow and Multi-ranger decks to measure distances
in all directions and tries to keep away from anything that comes closer
than 0.2m by setting a velocity in the opposite direction.
The demo is ended by either pressing Ctrl-C or by holding your hand above the
Crazyflie.
For the example to run the following hardware is needed:
* Crazyflie 2.0
* Crazyradio PA
* Flow deck
* Multiranger deck
"""
import logging
import sys
import time
import cflib.crtp
from cflib.crazyflie import Crazyflie
from cflib.crazyflie.syncCrazyflie import SyncCrazyflie
from cflib.positioning.motion_commander import MotionCommander
from cflib.utils.multiranger import Multiranger
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import matplotlib.patches as patches
# Default Crazyradio URI; can be overridden by the first CLI argument.
URI = 'radio://0/80/2M'

if len(sys.argv) > 1:
    URI = sys.argv[1]

# Only output errors from the logging framework
logging.basicConfig(level=logging.ERROR)
def is_close(range):
    """Return True when a valid ranger reading is below the 0.2 m threshold.

    A reading of None (no measurement available) is treated as not close.
    Note: the parameter name shadows the builtin `range` (kept for
    call-site compatibility).
    """
    MIN_DISTANCE = 0.2  # m
    return range is not None and range < MIN_DISTANCE
if __name__ == '__main__':
    # Initialize the low-level drivers (don't list the debug drivers)
    cflib.crtp.init_drivers(enable_debug_driver=False)
    rangeArray = []
    cf = Crazyflie(rw_cache='./cache')
    with SyncCrazyflie(URI, cf=cf) as scf:
        with MotionCommander(scf) as motion_commander:
            with Multiranger(scf) as multiranger:
                # NOTE(review): unlike the "push around" demo described in the
                # module docstring, this body only starts a left turn, records
                # a single front-ranger sample, then exits the flight contexts
                motion_commander.start_turn_left(90)
                rangeArray.append(multiranger.front)
                time.sleep(0.05)
    # plot the captured reading(s); no plt.show() here -- presumably run in
    # an interactive backend, TODO confirm
    plt.plot(rangeArray)
| 31.488889
| 77
| 0.693013
| 380
| 2,834
| 4.915789
| 0.542105
| 0.019272
| 0.020878
| 0.030514
| 0.043897
| 0.029979
| 0
| 0
| 0
| 0
| 0
| 0.017156
| 0.218419
| 2,834
| 90
| 78
| 31.488889
| 0.826185
| 0.590332
| 0
| 0
| 0
| 0
| 0.026643
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.34375
| 0
| 0.4375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
160c8a87b1d001ed3cb1d85873c9a8a8f238d3b2
| 6,537
|
py
|
Python
|
lessons/sqlite_example/database.py
|
eliranM98/python_course
|
d9431dd6c0f27fca8ca052cc2a821ed0b883136c
|
[
"MIT"
] | 6
|
2019-03-29T06:14:53.000Z
|
2021-10-15T23:42:36.000Z
|
lessons/sqlite_example/database.py
|
eliranM98/python_course
|
d9431dd6c0f27fca8ca052cc2a821ed0b883136c
|
[
"MIT"
] | 4
|
2019-09-06T10:03:40.000Z
|
2022-03-11T23:30:55.000Z
|
lessons/sqlite_example/database.py
|
eliranM98/python_course
|
d9431dd6c0f27fca8ca052cc2a821ed0b883136c
|
[
"MIT"
] | 12
|
2019-06-20T19:34:52.000Z
|
2021-10-15T23:42:39.000Z
|
"""
in this example we want to create a user credentials database with:
user_id & password
logger showing connection logs, DB version, errors during fetching & executing
"""
import sqlite3
from lessons.sqlite_example.log import create as create_logger
class Commands:
    """SQL command templates for the user-credentials table.

    Placeholders are filled with str.format by DataBase/DataBaseExtention.
    """
    create_users_table = '''
        CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id text,
            password text
        );
    '''
    add_user = 'INSERT INTO users (user_id, password) VALUES (\'{}\', \'{}\');'
    get_users = 'SELECT user_id, password FROM users;'
    get_user_by_user_id = 'SELECT user_id, password FROM users WHERE user_id = \'{}\';'
    # fixed: a stray '' literal was concatenated onto the end of this command
    get_user_by_id = 'SELECT user_id, password FROM users WHERE id = \'{}\';'
    get_last_user = 'SELECT user_id, password FROM users ORDER BY ID DESC LIMIT 1'
    drop_table = 'DROP TABLE IF EXISTS {};'
class DataBase:
    """SQLite-backed store with thin execute/fetch helpers.

    Failures inside the helpers are logged (via log.exception) and swallowed,
    returning None, so callers must check results.

    :param db_file: path of the SQLite database file (or ':memory:')
    :param log: logger exposing info()/exception()
    :param commands: namespace of SQL templates; defaults to Commands
    """
    def __init__(self, db_file, log, commands=None):
        """Open the database connection and store the command set."""
        try:
            self.log = log
            self.log.info('connecting to database')
            self.connection = sqlite3.connect(db_file)
            self.cursor = self.connection.cursor()
            self.log.info('connection success')
            self.log.info('sqlite3 version {}'.format(sqlite3.version))
            if commands is None:
                commands = Commands
            self.command = commands
        except Exception as e:
            self.log.exception(e)
            # Re-raise the ORIGINAL exception: the old `raise Exception(e)`
            # discarded the type and traceback of the real failure.
            raise

    def execute(self, command, *args, **kwargs):
        """Execute *command*, forwarding *args* as sqlite3 bind parameters.

        The original silently discarded *args*, making parameterized queries
        (``execute(sql, (a, b))``) impossible. Returns the cursor, or None
        if execution failed (the failure is logged).
        """
        try:
            return self.cursor.execute(command, *args)
        except Exception as e:
            self.log.exception(e)

    def fetch(self, command=None, *args, **kw):
        """Optionally run *command*, then return all rows (None on error)."""
        if command is not None:
            self.execute(command, *args)
        try:
            return self.cursor.fetchall()
        except Exception as e:
            self.log.exception(e)

    def export_from_table_to_file(self, table, file_name, titles, permission='w'):
        """Dump every row of *table* to *file_name* as naive CSV.

        :param titles: column headers written as the first line
        :param permission: open() mode — 'w' to overwrite, 'a' to append
        NOTE(review): values containing commas are not escaped; use the csv
        module if that ever matters.
        """
        try:
            self.cursor.execute("select * from {}".format(table))
            rows = self.cursor.fetchall()
            with open(file_name, permission) as f:
                f.write(','.join(titles) + '\n')
                for row in rows:
                    f.write(','.join(str(cell) for cell in row) + '\n')
        except Exception as e:
            self.log.exception(e)

    def fetch_log(self, *args, **kw):
        """Like fetch(), additionally logging each returned row."""
        rows = self.fetch(*args, **kw)
        if rows is not None:
            for r in rows:
                self.log.info(r)
        return rows
class DataBaseExtention(DataBase):
    """DataBase with credential-lookup convenience queries."""

    def get_user_credentials(self, user=None, id=None):
        """Return one (user_id, password) tuple, or None when not found.

        Lookup priority: by user_id when *user* is given, else by primary
        key when *id* is given, else the most recently inserted user.
        """
        if user is not None:
            rows = self.fetch(self.command.get_user_by_user_id.format(user))
        elif id is not None:
            rows = self.fetch(self.command.get_user_by_id.format(id))
        else:
            rows = self.fetch(self.command.get_last_user)
        # fetch() returns None when the query failed; the original crashed
        # here with len(None). Treat both None and [] as "not found".
        if rows:
            return rows[0]
        return None
if "__main__" == __name__:
    import os
    # Build sibling-file paths portably: the original concatenated a
    # hard-coded '\\' separator, which only works on Windows.
    here = os.path.dirname(os.path.abspath(__file__))
    log_file = os.path.join(here, 'log.txt')
    db_file = os.path.join(here, 'db.db')
    log = create_logger(log_file=log_file)
    database = DataBaseExtention(db_file, log)
    # Lesson examples (table setup / inserts / lookups) — uncomment to run:
    # database.execute(database.command.drop_table.format('users'))
    # database.execute(database.command.create_users_table)
    # database.execute(database.command.add_user.format('cs0008', '123123a'))
    # user_credentials = database.get_user_credentials(id='14')
    # database.connection.commit()
    # print(user_credentials)
    try:
        # Export the (pre-existing) websites table to CSV.
        database.export_from_table_to_file(
            table='websites',
            file_name='exported.csv',
            titles=('id', 'url', 'popularity_score', 'monthly_visitations')
        )
    finally:
        # Always release the SQLite handle, even if the export fails.
        database.connection.close()
| 39.379518
| 137
| 0.621539
| 778
| 6,537
| 5.062982
| 0.203085
| 0.076161
| 0.093425
| 0.121858
| 0.488195
| 0.385885
| 0.331048
| 0.310485
| 0.133029
| 0.048743
| 0
| 0.02949
| 0.24782
| 6,537
| 165
| 138
| 39.618182
| 0.771609
| 0.420223
| 0
| 0.142857
| 0
| 0
| 0.162038
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.071429
| 0.035714
| 0
| 0.27381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
1612dd8d2c7befa9fffd9b219b0f1e9b1d9948d5
| 508
|
py
|
Python
|
Dataset/Leetcode/train/7/93.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/7/93.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/7/93.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
class Solution:
    def XXX(self, x: int) -> int:
        """Reverse the decimal digits of *x*, preserving its sign.

        Returns 0 when the reversed magnitude exceeds the signed 32-bit
        maximum (matching the original's clamp behavior exactly).

        The original built an index->digit dict and summed v * 10**i —
        equivalent to simply reversing the digit string.
        """
        reversed_abs = int(str(abs(x))[::-1])
        if reversed_abs > 2 ** 31 - 1:
            return 0
        return -reversed_abs if x < 0 else reversed_abs
| 24.190476
| 43
| 0.324803
| 65
| 508
| 2.538462
| 0.461538
| 0.109091
| 0.048485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054393
| 0.529528
| 508
| 20
| 44
| 25.4
| 0.635983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1614bfb3f4849c9afe583c49f1da9a5698654285
| 2,648
|
py
|
Python
|
dist/weewx-4.0.0b3/bin/weewx/junk2.py
|
v0rts/docker-weewx
|
70b2f252051dfead4fcb74e74662b297831e6342
|
[
"Apache-2.0"
] | 10
|
2017-01-05T17:30:48.000Z
|
2021-09-18T15:04:20.000Z
|
dist/weewx-4.0.0b3/bin/weewx/junk2.py
|
v0rts/docker-weewx
|
70b2f252051dfead4fcb74e74662b297831e6342
|
[
"Apache-2.0"
] | 2
|
2019-07-21T10:48:42.000Z
|
2022-02-16T20:36:45.000Z
|
dist/weewx-4.0.0b3/bin/weewx/junk2.py
|
v0rts/docker-weewx
|
70b2f252051dfead4fcb74e74662b297831e6342
|
[
"Apache-2.0"
] | 12
|
2017-01-05T18:50:30.000Z
|
2021-10-05T07:35:45.000Z
|
from __future__ import print_function
import time
import weeutil.weeutil
import weewx.manager
import weewx.xtypes
# Experimental scratch script: tries several SQL formulations for computing a
# 'diff'-style aggregate (energy change over a timespan) against a weewx
# archive database. Only the LAST SQL_TEMPLATE assignment is executed; the
# earlier ones are kept as discarded drafts.
archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'}
archive_mysql = {'database_name': 'weewx', 'user': 'weewx', 'password': 'weewx', 'driver': 'weedb.mysql'}
# NOTE(review): sql_str is built but never executed anywhere below.
sql_str = "SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM %s " \
          "WHERE dateTime > ? AND dateTime <= ?" % ('avg', 'outTemp', 'archive')
# The second assignment overrides the first: a 600-second (10-minute) span.
timespan = weeutil.weeutil.TimeSpan(1573245000, 1573246800)
timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600)
print('timespan=', timespan)
with weewx.manager.Manager.open(archive_sqlite) as db_manager:
    # Values %-interpolated into the final SQL template below.
    interpolate_dict = {
        'aggregate_type': 'diff',
        'obs_type': 'ch8_a_energy2',
        'table_name': db_manager.table_name,
        'start': timespan.start,
        'stop': timespan.stop,
    }
    # Draft 1: subqueries on exact start/stop timestamps (fails if no record
    # exists at exactly those times — presumably why it was abandoned).
    SQL_TEMPLATE = "SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2 FROM archive WHERE dateTime=%(start)s)) / (%(stop)s - %(start)s) FROM archive WHERE dateTime=%(stop)s;"
    # Draft 2: self-join over a hard-coded timespan.
    SQL_TEMPLATE = """Select a.dateTime as StartTime
    , b.dateTime as EndTime
    , b.dateTime-a.dateTime as TimeChange
    , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange
    FROM archive a
    Inner Join archive b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)"""
    # Drafts 3 & 4: MIN/MAX subqueries to find the nearest records inside the
    # span (draft 4 drops the extra table alias from draft 3).
    SQL_TEMPLATE = """Select a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange
    FROM archive a, archive b WHERE b.dateTime = (Select MAX(c.dateTime) FROM archive c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);"""
    SQL_TEMPLATE = """Select a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange
    FROM archive a, archive b WHERE b.dateTime = (Select MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);"""
    # Final version — the only one actually executed — parameterized via
    # %-interpolation from interpolate_dict.
    SQL_TEMPLATE = "SELECT (b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime) "\
        "FROM archive a, archive b "\
        "WHERE b.dateTime = (SELECT MAX(dateTime) FROM archive WHERE dateTime <= %(stop)s) "\
        "AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime >= %(start)s);"
    sql_stmt = SQL_TEMPLATE % interpolate_dict
    print(sql_stmt)
    # Get the number of records
    with db_manager.connection.cursor() as cursor:
        for row in cursor.execute(sql_stmt):
            print(row)
| 50.923077
| 203
| 0.692976
| 362
| 2,648
| 4.933702
| 0.209945
| 0.073908
| 0.055431
| 0.094065
| 0.532475
| 0.487682
| 0.448488
| 0.448488
| 0.448488
| 0.448488
| 0
| 0.059743
| 0.178248
| 2,648
| 51
| 204
| 51.921569
| 0.761029
| 0.009441
| 0
| 0.05
| 0
| 0.2
| 0.6219
| 0.135063
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.025
| 0.125
| 0
| 0.125
| 0.1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
161a0260062e641dc32fc774ac4b854148c5381e
| 3,310
|
py
|
Python
|
src/requester/py/ElevatorTestCaseList.py
|
akzare/Elevator_Sys_Design
|
2f7d7381d68699515a43ec4cf7a8a8afade726f3
|
[
"MIT"
] | 1
|
2020-09-03T06:36:22.000Z
|
2020-09-03T06:36:22.000Z
|
src/requester/py/ElevatorTestCaseList.py
|
akzare/Elevator_Sys_Design
|
2f7d7381d68699515a43ec4cf7a8a8afade726f3
|
[
"MIT"
] | null | null | null |
src/requester/py/ElevatorTestCaseList.py
|
akzare/Elevator_Sys_Design
|
2f7d7381d68699515a43ec4cf7a8a8afade726f3
|
[
"MIT"
] | null | null | null |
'''
* @file ElevatorTestCaseList.py
* @author Armin Zare Zadeh
* @date 30 July 2020
* @version 0.1
* @brief Implements a class to hold all the test cases during the program life cycle.
'''
#!/usr/bin/env python3
import sys
import ctypes
import ElevatorConfig as cfg
import ElevatorMsgProtocol as msgProto
class ElevatorTestCaseList:
    '''
    Builds a test case list out of the configuration and holds it during
    the runtime.
    '''

    def __init__(self, config):
        self.config = config
        # Encoded request packets for both 'call' and 'go' test cases.
        self.CallGoTCList = []

    def _build_request(self, kind, tc, direction, go_msg_id, state):
        '''
        Encode one request packet from raw test-case tuple *tc*.

        :param kind: 'call' or 'go' — selects the usr_request type
        :param tc: config tuple (tx, rx, msg_id, msg_class, time_tag, floor, ...)
        '''
        msg_hdr = msgProto.MsgHeader(tx_node_addr=tc[0],
                                     rx_node_addr=tc[1],
                                     msg_id=tc[2],
                                     msg_class=tc[3],
                                     hdr_len=self.config.network['packet_header_len'],
                                     payload_len=self.config.network['packet_payload_req_len'])
        return msgProto.EncodeReqPacket(msg_header=msg_hdr,
                                        time_tag=tc[4],
                                        req_typ=self.config.usr_request[kind],
                                        floor_num=tc[5],
                                        direction=direction,
                                        go_msg_id=go_msg_id,
                                        state=state)

    def create_testcase_list(self):
        '''
        Creates a test case list out of the configuration.

        The original duplicated the whole encode sequence for 'call' and
        'go'; both loops now share _build_request.
        '''
        # 'call' requests carry a direction (tc[6]) and a linked 'go'
        # message id (tc[7]) and start in READY2GO.
        for k in self.config.test_case['call'].keys():
            tc = self.config.test_case['call'][k]
            self.CallGoTCList.append(
                self._build_request('call', tc, tc[6], tc[7],
                                    msgProto.CallGoState.READY2GO))
        # 'go' requests have no direction / linked id and start in RESET.
        for k in self.config.test_case['go'].keys():
            tc = self.config.test_case['go'][k]
            self.CallGoTCList.append(
                self._build_request('go', tc, 0, 0,
                                    msgProto.CallGoState.RESET))
| 50.151515
| 105
| 0.459517
| 327
| 3,310
| 4.461774
| 0.30581
| 0.164496
| 0.15353
| 0.197395
| 0.654558
| 0.650446
| 0.583962
| 0.583962
| 0.396162
| 0.309801
| 0
| 0.013205
| 0.405136
| 3,310
| 65
| 106
| 50.923077
| 0.727781
| 0.124773
| 0
| 0.162162
| 0
| 0
| 0.049156
| 0.016141
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0
| 0.108108
| 0
| 0.189189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16262857a0ab051d70328d47ffe56eedbe48f8d3
| 1,259
|
py
|
Python
|
tpp/controller/ConversionController.py
|
pennyarcade/TPPP
|
9bb6db774d77f74c54ed2fa004e97c1aa114fff9
|
[
"MIT"
] | null | null | null |
tpp/controller/ConversionController.py
|
pennyarcade/TPPP
|
9bb6db774d77f74c54ed2fa004e97c1aa114fff9
|
[
"MIT"
] | null | null | null |
tpp/controller/ConversionController.py
|
pennyarcade/TPPP
|
9bb6db774d77f74c54ed2fa004e97c1aa114fff9
|
[
"MIT"
] | null | null | null |
"""
Implements a non interactive controller to controt non-interactive visualizers.
(i.e. those that are used for converting TPP souce code into another format)
"""
from tpp.FileParser import FileParser
from tpp.controller.TPPController import TPPController
class ConversionController(TPPController):
    """
    Non-interactive controller driving non-interactive visualizers,
    i.e. those used to convert TPP source code into another format.
    """

    def __init__(self, input_file, output, visualizer_class):
        """
        Parse the TPP source and instantiate the visualizer.

        :param input_file: path of the TPP source file to convert
        :param output: output target handed to the visualizer
        :param visualizer_class: visualizer type to instantiate
        """
        super(ConversionController, self).__init__()
        self.pages = FileParser(input_file).get_pages()
        self.vis = visualizer_class(output)

    def run(self):
        """Feed every line of every page to the visualizer."""
        for page in self.pages:
            reached_end = False
            while not reached_end:
                reached_end = page.is_eop()
                self.vis.visualize(page.next_line(), reached_end)

    def close(self):
        """Close the underlying visualizer."""
        self.vis.close()
| 24.686275
| 81
| 0.590151
| 134
| 1,259
| 5.425373
| 0.440299
| 0.077029
| 0.038514
| 0.068776
| 0.264099
| 0.264099
| 0.162311
| 0.162311
| 0.162311
| 0.162311
| 0
| 0
| 0.317712
| 1,259
| 50
| 82
| 25.18
| 0.846333
| 0.347101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 0
| 1
| 0.176471
| false
| 0
| 0.117647
| 0
| 0.352941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1627fcf089cd43ce83004fbce276962343e2f2c7
| 785
|
py
|
Python
|
wow/wow.py
|
brisberg/Kiri-Cogs
|
9a5307ff8fbaa5e0560ec518cf26df52347da98d
|
[
"MIT"
] | null | null | null |
wow/wow.py
|
brisberg/Kiri-Cogs
|
9a5307ff8fbaa5e0560ec518cf26df52347da98d
|
[
"MIT"
] | null | null | null |
wow/wow.py
|
brisberg/Kiri-Cogs
|
9a5307ff8fbaa5e0560ec518cf26df52347da98d
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
class WowCog:
    """Custom Cog that had commands for WoW Memes"""

    def __init__(self, bot):
        self.bot = bot

    async def _play(self, url, ctx):
        """Helper for aliasing Play in the Audio module"""
        # Delegate playback to the Audio cog when it is loaded.
        audio_cog = self.bot.get_cog('Audio')
        if not audio_cog:
            await self.bot.say("Audio module required. Load with: {}load audio".format(ctx.prefix))
            return
        await ctx.invoke(audio_cog.play, url_or_search_terms=url)

    @commands.command(pass_context=True, no_pm=True)
    async def flamewreath(self, ctx):
        """I will not move when Flame Wreath is cast!"""
        # Docstring above is the user-visible help text — kept verbatim.
        await self._play("https://www.youtube.com/watch?v=gcA6y7sxKcA", ctx)
def setup(bot):
    """Entry point used by the bot loader: register the WowCog."""
    cog = WowCog(bot)
    bot.add_cog(cog)
| 29.074074
| 99
| 0.64586
| 113
| 785
| 4.371681
| 0.59292
| 0.05668
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003333
| 0.235669
| 785
| 26
| 100
| 30.192308
| 0.82
| 0.053503
| 0
| 0
| 0
| 0
| 0.147105
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0.0625
| 0.125
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
1631aec82f9bb8a63392680178fdfa614b25b1c9
| 10,654
|
py
|
Python
|
shardDesigner/shardTemplateDir/shardStemDir/log/elast.py
|
vinci-project/rootShard
|
2f6633c7fb1c1b690c0a38ffbb16af0b50d532bb
|
[
"MIT"
] | null | null | null |
shardDesigner/shardTemplateDir/shardStemDir/log/elast.py
|
vinci-project/rootShard
|
2f6633c7fb1c1b690c0a38ffbb16af0b50d532bb
|
[
"MIT"
] | 7
|
2020-03-02T11:23:41.000Z
|
2022-03-11T23:52:51.000Z
|
shardDesigner/shardTemplateDir/shardStemDir/log/elast.py
|
vinci-project/rootShard
|
2f6633c7fb1c1b690c0a38ffbb16af0b50d532bb
|
[
"MIT"
] | null | null | null |
import elasticsearch
from elasticsearch import Elasticsearch
from elasticsearch import helpers
import time, json, datetime, os
class elalog:
    """Ships chain telemetry (blocks, transactions, balances, clients) to
    Elasticsearch.

    Construction (re)creates four indices: daily 'blocks-<date>' and
    'transactions-<date>' plus the non-dated 'balance' and 'clients'.
    """

    def __init__(self, date):
        """Connect to Elasticsearch and (re)build all indices for *date*.

        :param date: date-string suffix used for the daily indices
        """
        # Prefer linked-container env vars; the literal defaults are template
        # placeholders substituted at deploy time.
        es_host = os.getenv("ES_PORT_9200_TCP_ADDR") or '<%ELASTICIP%>'
        es_port = os.getenv("ES_PORT_9200_TCP_PORT") or '9200'
        self.lastDate = date
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])

        # Every index uses the same shard layout (the original repeated this
        # literal four times).
        index_settings = {
            "number_of_shards": 5,
            "number_of_replicas": 0
        }

        # BLOCKS INDEX
        self.blocks_index_name = "blocks-" + date
        self.block_mapping = {
            "settings": index_settings,
            "mappings": {
                "blocks-" + date: {
                    "properties": {
                        "@dtime": {"type": "date", "format": "epoch_second"},
                        "hash": {"type": "text"},
                        "signatures": {"type": "text"},
                        "tcount": {"type": "long"},
                        "validator": {"type": "text", "fielddata": True},
                        "bheight": {"type": "long"}
                    }
                }
            }
        }
        self._recreate_index(self.blocks_index_name, self.block_mapping)

        # TRANSACTIONS INDEX
        self.transactions_index_name = "transactions-" + date
        self.transactions_mapping = {
            "settings": index_settings,
            "mappings": {
                "transactions-" + date: {
                    "properties": {
                        "@dtime": {"type": "date", "format": "epoch_second"},
                        "sender": {"type": "text", "fielddata": True},
                        "receiver": {"type": "text", "fielddata": True},
                        "token_count": {"type": "float"},
                        "token_type": {"type": "text", "fielddata": True},
                        "hash": {"type": "text"},
                        "block": {"type": "long"}
                    }
                }
            }
        }
        self._recreate_index(self.transactions_index_name, self.transactions_mapping)

        # BALANCE HISTORY
        self.balance_index_name = "balance"
        self.balance_mapping = {
            "settings": index_settings,
            "mappings": {
                "balance": {
                    "properties": {
                        "@dtime": {"type": "date", "format": "epoch_second"},
                        "user": {"type": "text", "fielddata": True},
                        "balance": {"type": "float"}
                    }
                }
            }
        }
        self._recreate_index(self.balance_index_name, self.balance_mapping)

        # VALIDATOR STATISTIC
        self.clients_index_name = "clients"
        self.clients_mapping = {
            "settings": index_settings,
            "mappings": {
                "clients": {
                    "properties": {
                        "@dtime": {"type": "date", "format": "epoch_second"},
                        "ip": {"type": "ip"},
                        "geoip": {
                            "properties": {
                                "city_name": {"type": "text"},
                                "continent_name": {"type": "text"},
                                "country_iso_code": {"type": "text"},
                                "location": {"type": "geo_point"},
                                "region_name": {"type": "text"}
                            }
                        },
                        "public_key": {"type": "text", "fielddata": True},
                        "client_type": {"type": "text", "fielddata": True}
                    }
                }
            }
        }
        self._recreate_index(self.clients_index_name, self.clients_mapping)

    def _recreate_index(self, index_name, mapping):
        """Drop *index_name* if it already exists, then create it fresh.

        Replaces four copy-pasted delete/create blocks. Failures are logged
        to stdout and swallowed; unlike the original, the create-when-absent
        path is now also guarded instead of raising out of the constructor.
        """
        try:
            if self.es.indices.exists(index_name):
                self.es.indices.delete(index=index_name)
            self.es.indices.create(index=index_name, body=mapping)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on create Indicies:", es1)

    def elasticClients(self, jsons: list):
        """Bulk-save validator/client documents (pre-built bulk actions)."""
        try:
            helpers.bulk(self.es, jsons)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on save Validators:", es1)
        # NOTE(review): printed even when the bulk call failed — kept to
        # preserve the original behavior.
        print("Save Validators in elastic!")

    def elasticBlock(self, timestamp: float, validator: str, tcount: int, signatures: list, hash: str, bheight: int):
        """Index one block document into the daily blocks index."""
        index = 'blocks-' + self.lastDate
        estype = 'blocks-' + self.lastDate
        eljson = json.dumps({"@dtime": int(timestamp), "validator": validator, "tcount": tcount, "signatures": list(signatures), "hash": hash, "bheight": bheight}, separators=(',', ':'))
        try:
            self.es.index(index=str(index).lower(), doc_type=estype.lower(), body=eljson)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on send Block:", es1)

    def elasticTransaction(self, jsons: list):
        """Bulk-save transaction documents (pre-built bulk actions)."""
        try:
            helpers.bulk(self.es, jsons)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on save bulk Transactions:", es1)

    def elasticBalanceHistory(self, balance: dict):
        """Upsert the current balance of every user in *balance*.

        :param balance: mapping of user id -> balance value; the user id is
            reused as the document _id so repeated saves overwrite.
        """
        users = balance.keys()
        jsonMas = []
        print("USER LEN:", len(users))
        for user in users:
            eljson = {"_index": "balance", "_type": "balance", "_id": user,
                      "_source": {"@dtime": int(time.time()), "user": user,
                                  "balance": balance.get(user)}}
            jsonMas.append(eljson)
        try:
            helpers.bulk(self.es, jsonMas)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on save balance:", es1)

    def getLastEBlock(self):
        """Return the highest indexed block height for the day.

        Returns 0 when the index is empty, None on Elasticsearch errors
        (the error is printed).
        """
        query = {"aggs": {
            "max_blnum": {"max": {"field": "bheight"}}
        }, "size": 0
        }
        try:
            answer = self.es.search(index="blocks-" + self.lastDate, doc_type="blocks-" + self.lastDate, body=query)
            # `is not None` instead of the original `not ... == None`.
            if answer["aggregations"]["max_blnum"]["value"] is not None:
                return int(answer["aggregations"]["max_blnum"]["value"])
            else:
                return 0
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on search last block index:", es1)
| 41.455253
| 186
| 0.382016
| 728
| 10,654
| 5.443681
| 0.190934
| 0.033308
| 0.052485
| 0.097653
| 0.598789
| 0.524098
| 0.500883
| 0.480696
| 0.432753
| 0.357557
| 0
| 0.007849
| 0.521682
| 10,654
| 256
| 187
| 41.617188
| 0.769819
| 0.006289
| 0
| 0.436681
| 0
| 0
| 0.142695
| 0.003969
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026201
| false
| 0
| 0.017467
| 0
| 0.056769
| 0.048035
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
163248c24fc9b2b48d8f714d22251c83d3496af1
| 2,694
|
py
|
Python
|
dialogue-engine/test/programytest/config/brain/test_oob.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 104
|
2020-03-30T09:40:00.000Z
|
2022-03-06T22:34:25.000Z
|
dialogue-engine/test/programytest/config/brain/test_oob.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 25
|
2020-06-12T01:36:35.000Z
|
2022-02-19T07:30:44.000Z
|
dialogue-engine/test/programytest/config/brain/test_oob.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 10
|
2020-04-02T23:43:56.000Z
|
2021-05-14T13:47:01.000Z
|
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
from programy.config.file.yaml_file import YamlConfigurationFile
from programy.config.brain.oob import BrainOOBConfiguration
from programy.clients.events.console.config import ConsoleConfiguration
class BrainOOBConfigurationTests(unittest.TestCase):
    """Tests for BrainOOBConfiguration's YAML section loading."""

    def test_oob_with_data(self):
        # An oob entry that specifies a classname should expose it after load.
        yaml = YamlConfigurationFile()
        self.assertIsNotNone(yaml)
        yaml.load_from_text("""
        brain:
            oobs:
                default:
                    classname: programy.oob.defaults.default.DefaultOutOfBandProcessor
        """, ConsoleConfiguration(), ".")
        brain_config = yaml.get_section("brain")
        self.assertIsNotNone(brain_config)
        oobs_config = yaml.get_section("oobs", brain_config)
        self.assertIsNotNone(oobs_config)
        oob_config = BrainOOBConfiguration("default")
        oob_config.load_config_section(yaml, oobs_config, ".")
        # The configured processor class is reported verbatim.
        self.assertEqual("programy.oob.defaults.default.DefaultOutOfBandProcessor", oob_config.classname)

    def test_default_without_data(self):
        # An oob entry with no classname should load with classname None.
        yaml = YamlConfigurationFile()
        self.assertIsNotNone(yaml)
        yaml.load_from_text("""
        brain:
            oobs:
                default:
        """, ConsoleConfiguration(), ".")
        brain_config = yaml.get_section("brain")
        self.assertIsNotNone(brain_config)
        oobs_config = yaml.get_section("oobs", brain_config)
        self.assertIsNotNone(oobs_config)
        oob_config = BrainOOBConfiguration("default")
        oob_config.load_config_section(yaml, oobs_config, ".")
        self.assertIsNone(oob_config.classname)
| 42.761905
| 126
| 0.72977
| 325
| 2,694
| 5.935385
| 0.396923
| 0.045619
| 0.026957
| 0.041472
| 0.381545
| 0.328668
| 0.328668
| 0.328668
| 0.328668
| 0.328668
| 0
| 0.001849
| 0.197105
| 2,694
| 62
| 127
| 43.451613
| 0.889968
| 0.394209
| 0
| 0.722222
| 0
| 0
| 0.188424
| 0.067734
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.055556
| false
| 0
| 0.111111
| 0
| 0.194444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
163cbfb7a11f70465bec9d58e23cdc35d6fe4e2c
| 5,976
|
py
|
Python
|
v1/hsvfilter.py
|
gavinIRL/RHBot
|
1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf
|
[
"MIT"
] | null | null | null |
v1/hsvfilter.py
|
gavinIRL/RHBot
|
1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf
|
[
"MIT"
] | 60
|
2021-03-29T14:29:49.000Z
|
2021-05-03T06:06:19.000Z
|
v1/hsvfilter.py
|
gavinIRL/RHBot
|
1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf
|
[
"MIT"
] | null | null | null |
import typing
# custom data structure to hold the state of an HSV filter
class HsvFilter:
    """Holds the state of an HSV filter: lower/upper channel bounds plus
    post-threshold saturation/value adjustments. All fields default to None.
    """

    def __init__(self, hMin=None, sMin=None, vMin=None, hMax=None, sMax=None, vMax=None,
                 sAdd=None, sSub=None, vAdd=None, vSub=None):
        # Lower and upper HSV bounds.
        self.hMin, self.sMin, self.vMin = hMin, sMin, vMin
        self.hMax, self.sMax, self.vMax = hMax, sMax, vMax
        # Saturation / value add and subtract adjustments.
        self.sAdd, self.sSub = sAdd, sSub
        self.vAdd, self.vSub = vAdd, vSub
# Putting this here out of the way as it's a chonk
# For a given item string case it will return the optimal filter and the correct position to look
def grab_object_preset(object_name=None, **kwargs) -> typing.Tuple[HsvFilter, list]:
    """Return the tuned HSV filter and screen region for a named game object.

    Args:
        object_name: Preset key naming the on-screen object; None selects a
            pass-through default filter covering most of the screen.
        **kwargs: Optional flags. big_map=True selects the enlarged minimap
            region for map-related presets; "inv_grid_location" requires
            integer "col" and "row" kwargs.

    Returns:
        Usually a (HsvFilter, [left, top, right, bottom]) pair.
        NOTE(review): despite the annotation, several branches return a bare
        HsvFilter with no region, the "button_*" branches return (None, [...]),
        and an unrecognised object_name falls off the end returning None —
        callers must cope with each shape, or the branches should be unified.
    """
    if object_name is None:
        #print("Using default filter")
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [3, 32, 1280, 794]
    if object_name == "dungeon_check":
        return HsvFilter(0, 73, 94, 106, 255, 255, 0, 0, 0, 0), [1083, 295, 1188, 368]
    if object_name == "enemy_map_loc":
        #print("Using enemy location filter")
        if kwargs.get("big_map"):
            return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [485, 280, 900, 734]
        return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [1100, 50, 1260, 210]
    if object_name == "player_map_loc":
        if kwargs.get("big_map"):
            return HsvFilter(31, 94, 86, 73, 255, 255, 0, 0, 0, 0), [485, 280, 900, 734]
        return HsvFilter(31, 94, 86, 73, 255, 255, 0, 0, 0, 0), [1100, 50, 1260, 210]
    if object_name == "other_player_map_loc":
        if kwargs.get("big_map"):
            return HsvFilter(16, 172, 194, 32, 255, 255, 0, 0, 70, 37), [485, 280, 900, 734]
        return HsvFilter(16, 172, 194, 32, 255, 255, 0, 0, 70, 37), [1100, 50, 1260, 210]
    if object_name == "loot_distant":
        return HsvFilter(14, 116, 33, 32, 210, 59, 16, 0, 3, 0), [10, 145, 1084, 684]
    if object_name == "loot_near":
        return HsvFilter(0, 155, 135, 31, 240, 217, 0, 0, 0, 0), [460, 420, 855, 710]
    if object_name == "prompt_press_x_pickup":
        return HsvFilter(78, 110, 110, 97, 189, 255, 0, 0, 0, 0), [1080, 660, 1255, 725]
    if object_name == "message_section_cleared":
        return HsvFilter(0, 0, 214, 179, 65, 255, 0, 0, 0, 17), [464, 600, 855, 680]
    if object_name == "message_go":
        return HsvFilter(32, 114, 89, 58, 255, 255, 0, 12, 0, 0), [600, 222, 700, 275]
    if object_name == "enemy_nametag":
        return HsvFilter(49, 0, 139, 91, 30, 197, 0, 0, 40, 38), [10, 145, 1084, 684]
    if object_name == "message_boss_encounter":
        return HsvFilter(0, 92, 128, 13, 255, 255, 0, 0, 0, 0), [630, 520, 1120, 680]
    if object_name == "display_boss_name_and_healthbar":
        return HsvFilter(0, 92, 123, 29, 255, 255, 0, 0, 0, 20), [415, 533, 888, 700]
    if object_name == "loot_chest_normal":
        # This is a difficult one to separate
        return HsvFilter(0, 34, 38, 28, 152, 124, 0, 0, 5, 12), [10, 145, 1084, 684]
    if object_name == "map_outline":
        if kwargs.get("big_map"):
            return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [485, 280, 900, 734]
        return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [1100, 50, 1260, 210]
    if object_name == "gate_map_pos":
        # This is a very difficult one to separate
        if kwargs.get("big_map"):
            return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [485, 280, 900, 734]
        return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [1100, 50, 1260, 210]
    # NOTE(review): from here down several presets return a bare HsvFilter
    # (no region list), diverging from the annotated return type.
    if object_name == "prompt_move_reward_screen":
        return HsvFilter(72, 98, 92, 105, 255, 225, 0, 54, 24, 38)
    if object_name == "prompt_select_card":
        return HsvFilter(79, 149, 140, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "event_chest_special_appear":
        return HsvFilter(0, 124, 62, 88, 217, 246, 0, 0, 0, 0)
    if object_name == "inventory_green_item":
        return HsvFilter(37, 147, 0, 61, 255, 255, 0, 0, 0, 0)
    if object_name == "inventory_blue_item":
        return HsvFilter(79, 169, 0, 109, 246, 188, 0, 0, 0, 0)
    if object_name == "inventory_yellow_item":
        # This is a dangerous one as it can barely
        # distinguish against green items and vice versa
        return HsvFilter(19, 91, 107, 31, 168, 181, 0, 11, 32, 21)
    if object_name == "inventory_purple_item":
        return HsvFilter(126, 153, 0, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "button_repair":
        return None, [208, 600]
    # These are all To be done later
    if object_name == "event_card_trade":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "event_otherworld":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "loot_chest_special":
        # NOTE(review): both arms return the same region — big_map branch
        # looks like an unfinished placeholder.
        if kwargs.get("big_map"):
            return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [10, 145, 1084, 684]
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [10, 145, 1084, 684]
    if object_name == "cards":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [735, 32, 1085, 100]
    if object_name == "enemy_arrow":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [10, 145, 1084, 684]
    # Buttons for clicking, known positions
    if object_name == "button_explore_again":
        return None, []
    if object_name == "button_choose_map":
        return None, []
    if object_name == "button_open_store":
        return None, []
    if object_name == "button_go_town":
        return None, []
    if object_name == "button_inv_equipment":
        return None, []
    if object_name == "button_inv_consume":
        return None, []
    if object_name == "button_inv_other":
        return None, []
    if object_name == "button_repair_confirm":
        return None, []
    if object_name == "inv_grid_location":
        # Grid cell pixel position: 44px pitch from the (533, 277) origin.
        # NOTE(review): raises TypeError when "col"/"row" kwargs are absent
        # (kwargs.get returns None) — confirm all callers pass both.
        return None, [533+44*kwargs.get("col"), 277+44*kwargs.get("row")]
| 49.38843
| 97
| 0.593373
| 954
| 5,976
| 3.591195
| 0.285115
| 0.045534
| 0.039405
| 0.021016
| 0.430823
| 0.405721
| 0.360187
| 0.304437
| 0.271454
| 0.265032
| 0
| 0.218297
| 0.261044
| 5,976
| 120
| 98
| 49.8
| 0.557518
| 0.083668
| 0
| 0.242718
| 0
| 0
| 0.124085
| 0.038616
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019417
| false
| 0
| 0.009709
| 0
| 0.466019
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
163d903313e3ca0e241b2c27dfd7fddcb15bbfdb
| 287
|
py
|
Python
|
ecommerce_api/core/cart/exceptions.py
|
victormartinez/ecommerceapi
|
a887d9e938050c15ebf52001f63d7aa7f33fa5ee
|
[
"MIT"
] | null | null | null |
ecommerce_api/core/cart/exceptions.py
|
victormartinez/ecommerceapi
|
a887d9e938050c15ebf52001f63d7aa7f33fa5ee
|
[
"MIT"
] | null | null | null |
ecommerce_api/core/cart/exceptions.py
|
victormartinez/ecommerceapi
|
a887d9e938050c15ebf52001f63d7aa7f33fa5ee
|
[
"MIT"
] | null | null | null |
from typing import Iterable, Optional
class ProductsNotFound(Exception):
    """Raised when one or more requested products do not exist.

    Attributes:
        product_ids: The offending product ids ([] when none were given).
        message: Human-readable error description.
    """

    def __init__(self, product_ids: Optional[Iterable[int]] = None):
        # A falsy argument (None or an empty iterable) collapses to [].
        if product_ids:
            self.product_ids = product_ids
        else:
            self.product_ids = []
        self.message = "One or more products are invalid."
        super().__init__(self.message)
| 31.888889
| 68
| 0.700348
| 35
| 287
| 5.428571
| 0.657143
| 0.157895
| 0.147368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198606
| 287
| 8
| 69
| 35.875
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0.114983
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
163dc7048c89ab3ce7a0707b33435bed5fbe6660
| 6,742
|
py
|
Python
|
test/unit/test_record.py
|
jsoref/neo4j-python-driver
|
32c130c9a975dbf8c0d345b362d096b5e1dd3e5b
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_record.py
|
jsoref/neo4j-python-driver
|
32c130c9a975dbf8c0d345b362d096b5e1dd3e5b
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_record.py
|
jsoref/neo4j-python-driver
|
32c130c9a975dbf8c0d345b362d096b5e1dd3e5b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) 2002-2018 "Neo Technology,"
# Network Engine for Objects in Lund AB [http://neotechnology.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from neo4j.v1 import Record
class RecordTestCase(TestCase):
    """Behavioural tests for the neo4j.v1.Record type."""

    @staticmethod
    def _nigel_record():
        """Build the standard two-field sample record."""
        return Record(["name", "empire"], ["Nigel", "The British Empire"])

    @staticmethod
    def _alice_record():
        """Build the standard three-field sample record."""
        return Record(["name", "age", "married"], ["Alice", 33, True])

    def test_record_equality(self):
        nigel_one = self._nigel_record()
        nigel_two = self._nigel_record()
        stefan = Record(["name", "empire"], ["Stefan", "Das Deutschland"])
        assert nigel_one == nigel_two
        assert nigel_one != stefan
        assert nigel_two != stefan

    def test_record_hashing(self):
        nigel_one = self._nigel_record()
        nigel_two = self._nigel_record()
        stefan = Record(["name", "empire"], ["Stefan", "Das Deutschland"])
        assert hash(nigel_one) == hash(nigel_two)
        assert hash(nigel_one) != hash(stefan)
        assert hash(nigel_two) != hash(stefan)

    def test_record_iter(self):
        record = self._nigel_record()
        assert list(record.__iter__()) == ["name", "empire"]

    def test_record_copy(self):
        original = self._nigel_record()
        duplicate = original.copy()
        assert dict(original) == dict(duplicate)
        assert original.keys() == duplicate.keys()
        assert original is not duplicate

    def test_record_as_dict(self):
        record = self._nigel_record()
        assert dict(record) == {"name": "Nigel", "empire": "The British Empire"}

    def test_record_as_list(self):
        record = self._nigel_record()
        assert list(record) == ["name", "empire"]

    def test_record_len(self):
        assert len(self._nigel_record()) == 2

    def test_record_repr(self):
        expected = "<Record name='Nigel' empire='The British Empire'>"
        assert repr(self._nigel_record()) == expected

    def test_record_data(self):
        record = self._alice_record()
        self.assertEqual(record.data(), {"name": "Alice", "age": 33, "married": True})
        self.assertEqual(record.data("name"), {"name": "Alice"})
        self.assertEqual(record.data("age", "name"), {"age": 33, "name": "Alice"})
        # Unknown keys are included with a None value.
        self.assertEqual(record.data("age", "name", "shoe size"),
                         {"age": 33, "name": "Alice", "shoe size": None})
        self.assertEqual(record.data(0, "name"), {"name": "Alice"})
        self.assertEqual(record.data(0), {"name": "Alice"})
        self.assertEqual(record.data(1, 0), {"age": 33, "name": "Alice"})
        with self.assertRaises(IndexError):
            _ = record.data(1, 0, 999)

    def test_record_keys(self):
        self.assertEqual(self._alice_record().keys(), ("name", "age", "married"))

    def test_record_values(self):
        record = self._alice_record()
        self.assertEqual(record.values(), ("Alice", 33, True))
        self.assertEqual(record.values("name"), ("Alice",))
        self.assertEqual(record.values("age", "name"), (33, "Alice"))
        # Unknown keys yield None; integer selectors index positionally.
        self.assertEqual(record.values("age", "name", "shoe size"), (33, "Alice", None))
        self.assertEqual(record.values(0, "name"), ("Alice", "Alice"))
        self.assertEqual(record.values(0), ("Alice",))
        self.assertEqual(record.values(1, 0), (33, "Alice"))
        with self.assertRaises(IndexError):
            _ = record.values(1, 0, 999)

    def test_record_items(self):
        record = self._alice_record()
        self.assertEqual(record.items(), [("name", "Alice"), ("age", 33), ("married", True)])
        self.assertEqual(record.items("name"), [("name", "Alice")])
        self.assertEqual(record.items("age", "name"), [("age", 33), ("name", "Alice")])
        self.assertEqual(record.items("age", "name", "shoe size"),
                         [("age", 33), ("name", "Alice"), ("shoe size", None)])
        self.assertEqual(record.items(0, "name"), [("name", "Alice"), ("name", "Alice")])
        self.assertEqual(record.items(0), [("name", "Alice")])
        self.assertEqual(record.items(1, 0), [("age", 33), ("name", "Alice")])
        with self.assertRaises(IndexError):
            _ = record.items(1, 0, 999)

    def test_record_index(self):
        record = self._alice_record()
        self.assertEqual(record.index("name"), 0)
        self.assertEqual(record.index("age"), 1)
        self.assertEqual(record.index("married"), 2)
        with self.assertRaises(KeyError):
            _ = record.index("shoe size")
        self.assertEqual(record.index(0), 0)
        self.assertEqual(record.index(1), 1)
        self.assertEqual(record.index(2), 2)
        with self.assertRaises(IndexError):
            _ = record.index(3)
        with self.assertRaises(TypeError):
            _ = record.index(None)

    def test_record_value(self):
        record = self._alice_record()
        self.assertEqual(record.value(), "Alice")
        self.assertEqual(record.value("name"), "Alice")
        self.assertEqual(record.value("age"), 33)
        self.assertEqual(record.value("married"), True)
        self.assertEqual(record.value("shoe size"), None)
        self.assertEqual(record.value("shoe size", 6), 6)
        self.assertEqual(record.value(0), "Alice")
        self.assertEqual(record.value(1), 33)
        self.assertEqual(record.value(2), True)
        self.assertEqual(record.value(3), None)
        self.assertEqual(record.value(3, 6), 6)
        with self.assertRaises(TypeError):
            _ = record.value(None)

    def test_record_contains(self):
        record = self._alice_record()
        self.assertIn("name", record)
        self.assertIn("age", record)
        self.assertIn("married", record)
        self.assertNotIn("shoe size", record)
        self.assertIn(0, record)
        self.assertIn(1, record)
        self.assertIn(2, record)
        self.assertNotIn(3, record)
        with self.assertRaises(TypeError):
            _ = record.index(None)
| 43.496774
| 116
| 0.590923
| 846
| 6,742
| 4.64539
| 0.165485
| 0.148855
| 0.158779
| 0.080153
| 0.628753
| 0.508142
| 0.436896
| 0.370992
| 0.35369
| 0.313995
| 0
| 0.024214
| 0.222041
| 6,742
| 154
| 117
| 43.779221
| 0.725072
| 0.103975
| 0
| 0.234783
| 0
| 0
| 0.17566
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.130435
| false
| 0
| 0.017391
| 0
| 0.156522
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
164ff194ddd6475fcc83a8af8f5b4d32701c55ea
| 886
|
py
|
Python
|
pymterm/colour/tango.py
|
stonewell/pymterm
|
af36656d5f7fb008533178d14b00d83d72ba00cf
|
[
"MIT"
] | 102
|
2016-07-21T06:39:02.000Z
|
2022-03-09T19:34:03.000Z
|
pymterm/colour/tango.py
|
stonewell/pymterm
|
af36656d5f7fb008533178d14b00d83d72ba00cf
|
[
"MIT"
] | 2
|
2017-01-11T13:43:34.000Z
|
2020-01-19T12:06:47.000Z
|
pymterm/colour/tango.py
|
stonewell/pymterm
|
af36656d5f7fb008533178d14b00d83d72ba00cf
|
[
"MIT"
] | 4
|
2020-03-22T04:08:35.000Z
|
2021-06-27T23:38:02.000Z
|
# 16-entry terminal colour palette (the Tango scheme, per the name).
# Each entry packs R, G and B as doubled hex bytes ('2e2e'+'3434'+'3636');
# parse_tango_color() below reads only the first two digits of each
# four-digit group. Presumably slots 0-7 are the normal ANSI colours and
# 8-15 the bright variants — TODO confirm against the terminal code.
TANGO_PALLETE = [
    '2e2e34343636',
    'cccc00000000',
    '4e4e9a9a0606',
    'c4c4a0a00000',
    '34346565a4a4',
    '757550507b7b',
    '060698989a9a',
    'd3d3d7d7cfcf',
    '555557575353',
    'efef29292929',
    '8a8ae2e23434',
    'fcfce9e94f4f',
    '72729f9fcfcf',
    'adad7f7fa8a8',
    '3434e2e2e2e2',
    'eeeeeeeeecec',
]
def parse_tango_color(c):
    """Convert a 12-hex-digit doubled-RGB string into [r, g, b, 255].

    Each colour channel occupies four hex digits with the byte doubled
    (e.g. '2e2e'); only the leading two digits of each group are read.
    """
    channels = [int(c[offset:offset + 2], 16) for offset in (0, 4, 8)]
    return channels + [0xFF]
def apply_color(cfg, color_table):
    """Install the Tango palette into *cfg* and *color_table*.

    Sets the default foreground, background and cursor colours on *cfg*,
    then overwrites color_table[i] with the parsed palette entry for every
    index both sequences share; extra color_table slots are left untouched.

    Args:
        cfg: Config object; its default_*_color attributes are assigned.
        color_table: Mutable sequence of colour slots, updated in place.
    """
    cfg.default_foreground_color = parse_tango_color('eeeeeeeeecec')
    cfg.default_background_color = parse_tango_color('323232323232')
    cfg.default_cursor_color = cfg.default_foreground_color
    # Slice to the table's length instead of a per-iteration bounds check
    # inside a range(len(...)) loop.
    for i, entry in enumerate(TANGO_PALLETE[:len(color_table)]):
        color_table[i] = parse_tango_color(entry)
| 24.611111
| 69
| 0.613995
| 98
| 886
| 5.316327
| 0.469388
| 0.076775
| 0.115163
| 0.095969
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215569
| 0.24605
| 886
| 35
| 70
| 25.314286
| 0.564371
| 0
| 0
| 0
| 0
| 0
| 0.254118
| 0
| 0
| 0
| 0.004706
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1653cd2fffd32e2ad6ea59e14f67f33d48afc170
| 560
|
py
|
Python
|
examples/django_mongoengine/bike/models.py
|
pfrantz/graphene-mongo
|
f7d4f3e194ec41793e6da547934c34e11fd9ef51
|
[
"MIT"
] | 260
|
2018-02-03T01:00:42.000Z
|
2022-02-18T12:42:01.000Z
|
examples/django_mongoengine/bike/models.py
|
pfrantz/graphene-mongo
|
f7d4f3e194ec41793e6da547934c34e11fd9ef51
|
[
"MIT"
] | 159
|
2018-02-09T07:35:03.000Z
|
2022-03-20T03:43:23.000Z
|
examples/django_mongoengine/bike/models.py
|
pfrantz/graphene-mongo
|
f7d4f3e194ec41793e6da547934c34e11fd9ef51
|
[
"MIT"
] | 124
|
2018-02-04T20:19:01.000Z
|
2022-03-25T21:40:41.000Z
|
from mongoengine import Document
from mongoengine.fields import (
FloatField,
StringField,
ListField,
URLField,
ObjectIdField,
)
class Shop(Document):
    """Mongo document describing a bike shop."""
    meta = {"collection": "shop"}  # stored in the "shop" collection
    ID = ObjectIdField()
    name = StringField()
    address = StringField()
    website = URLField()
class Bike(Document):
    """Mongo document describing a single bike."""
    meta = {"collection": "bike"}  # stored in the "bike" collection
    ID = ObjectIdField()
    name = StringField()
    brand = StringField()
    year = StringField()  # kept as a string field, not an integer
    size = ListField(StringField())
    wheel_size = FloatField()
    type = StringField()  # NOTE: shadows the builtin name "type" at class scope
| 20
| 35
| 0.642857
| 49
| 560
| 7.326531
| 0.489796
| 0.083565
| 0.122563
| 0.167131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241071
| 560
| 27
| 36
| 20.740741
| 0.844706
| 0
| 0
| 0.173913
| 0
| 0
| 0.05
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.086957
| 0
| 0.73913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1654fce2866f6b2ef021c29092efa26419e5ba83
| 4,918
|
py
|
Python
|
uhd_restpy/testplatform/sessions/ixnetwork/impairment/profile/fixedclassifier/fixedclassifier.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 20
|
2019-05-07T01:59:14.000Z
|
2022-02-11T05:24:47.000Z
|
uhd_restpy/testplatform/sessions/ixnetwork/impairment/profile/fixedclassifier/fixedclassifier.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 60
|
2019-04-03T18:59:35.000Z
|
2022-02-22T12:05:05.000Z
|
uhd_restpy/testplatform/sessions/ixnetwork/impairment/profile/fixedclassifier/fixedclassifier.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 13
|
2019-05-20T10:48:31.000Z
|
2021-10-06T07:45:44.000Z
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union
class FixedClassifier(Base):
    """Selects the packets this impairment profile applies to.

    When several patterns are enabled they are ANDed together: a packet must
    match every enabled pattern before the profile will impair it.
    FixedClassifier encapsulates a list of fixedClassifier resources managed
    by the user. Retrieve the list from the server with FixedClassifier.find(),
    and manage it with FixedClassifier.add() and FixedClassifier.remove().
    """
    __slots__ = ()
    _SDM_NAME = 'fixedClassifier'
    _SDM_ATT_MAP = {}
    _SDM_ENUM_MAP = {}

    def __init__(self, parent, list_op=False):
        super(FixedClassifier, self).__init__(parent, list_op)

    @property
    def Pattern(self):
        """Accessor for the child Pattern resource.

        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.impairment.profile.fixedclassifier.pattern.pattern.Pattern): An instance of the Pattern class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        from uhd_restpy.testplatform.sessions.ixnetwork.impairment.profile.fixedclassifier.pattern.pattern import Pattern
        # Serve the cached child when present, otherwise build a fresh one.
        cached = self._properties.get('Pattern', None)
        if cached is None:
            return Pattern(self)
        return cached

    def add(self):
        """Create a new fixedClassifier resource on the server and add it to this container.

        Returns
        -------
        - self: This instance with all currently retrieved fixedClassifier resources using find and the newly added fixedClassifier resources available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))

    def remove(self):
        """Delete every contained fixedClassifier resource from the server.

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()

    def find(self):
        """Find and retrieve fixedClassifier resources from the server.

        All named parameters are evaluated on the server using regex and can be
        used to retrieve resources selectively; for an exact match make the
        value start with ^ and end with $. Called without parameters, every
        fixedClassifier resource is retrieved from the server.

        Returns
        -------
        - self: This instance with matching fixedClassifier resources retrieved from the server available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))

    def read(self, href):
        """Retrieve a single instance of fixedClassifier data from the server.

        Args
        ----
        - href (str): An href to the instance to be retrieved

        Returns
        -------
        - self: This instance with the fixedClassifier resources from the server available through an iterator or index

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
| 41.677966
| 187
| 0.700895
| 625
| 4,918
| 5.448
| 0.3536
| 0.044934
| 0.030543
| 0.033774
| 0.313069
| 0.249633
| 0.249633
| 0.249633
| 0.249633
| 0.229075
| 0
| 0.002127
| 0.235055
| 4,918
| 117
| 188
| 42.034188
| 0.902977
| 0.707808
| 0
| 0
| 0
| 0
| 0.027831
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.148148
| 0
| 0.740741
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
165e63725354de429a448d866f665cccca991916
| 656
|
py
|
Python
|
mmdet/ops/dcn/__init__.py
|
TJUsym/TJU_Advanced_CV_Homework
|
2d85943390e9ba53b80988e0ab8d50aef0cd17da
|
[
"Apache-2.0"
] | 1,158
|
2019-04-26T01:08:32.000Z
|
2022-03-30T06:46:24.000Z
|
mmdet/ops/dcn/__init__.py
|
TJUsym/TJU_Advanced_CV_Homework
|
2d85943390e9ba53b80988e0ab8d50aef0cd17da
|
[
"Apache-2.0"
] | 148
|
2021-03-18T09:44:02.000Z
|
2022-03-31T06:01:39.000Z
|
mmdet/ops/dcn/__init__.py
|
TJUsym/TJU_Advanced_CV_Homework
|
2d85943390e9ba53b80988e0ab8d50aef0cd17da
|
[
"Apache-2.0"
] | 197
|
2020-01-29T09:58:27.000Z
|
2022-03-25T12:08:56.000Z
|
from .functions.deform_conv import deform_conv, modulated_deform_conv
from .functions.deform_pool import deform_roi_pooling
from .modules.deform_conv import (DeformConv, ModulatedDeformConv,
DeformConvPack, ModulatedDeformConvPack)
from .modules.deform_pool import (DeformRoIPooling, DeformRoIPoolingPack,
ModulatedDeformRoIPoolingPack)
__all__ = [
'DeformConv', 'DeformConvPack', 'ModulatedDeformConv',
'ModulatedDeformConvPack', 'DeformRoIPooling', 'DeformRoIPoolingPack',
'ModulatedDeformRoIPoolingPack', 'deform_conv', 'modulated_deform_conv',
'deform_roi_pooling'
]
| 46.857143
| 76
| 0.739329
| 51
| 656
| 9.156863
| 0.352941
| 0.12848
| 0.08137
| 0.107066
| 0.124197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184451
| 656
| 13
| 77
| 50.461538
| 0.872897
| 0
| 0
| 0
| 0
| 0
| 0.275915
| 0.11128
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
1662a331dbe1e237d08e9e21a3e8d596bcbce6c4
| 2,477
|
py
|
Python
|
pyxrd/mixture/models/insitu_behaviours/insitu_behaviour.py
|
PyXRD/pyxrd
|
26bacdf64f3153fa74b8caa62e219b76d91a55c1
|
[
"BSD-2-Clause"
] | 27
|
2018-06-15T15:28:18.000Z
|
2022-03-10T12:23:50.000Z
|
pyxrd/mixture/models/insitu_behaviours/insitu_behaviour.py
|
PyXRD/pyxrd
|
26bacdf64f3153fa74b8caa62e219b76d91a55c1
|
[
"BSD-2-Clause"
] | 22
|
2018-06-14T08:29:16.000Z
|
2021-07-05T13:33:44.000Z
|
pyxrd/mixture/models/insitu_behaviours/insitu_behaviour.py
|
PyXRD/pyxrd
|
26bacdf64f3153fa74b8caa62e219b76d91a55c1
|
[
"BSD-2-Clause"
] | 8
|
2019-04-13T13:03:51.000Z
|
2021-06-19T09:29:11.000Z
|
# coding=UTF-8
# ex:ts=4:sw=4:et=on
#
# Copyright (c) 2013, Mathijs Dumon
# All rights reserved.
# Complete license can be found in the LICENSE file.
from mvc.models.properties import StringProperty
from pyxrd.generic.io.custom_io import storables, Storable
from pyxrd.generic.models.base import DataModel
from pyxrd.refinement.refinables.mixins import RefinementGroup
@storables.register()
class InSituBehaviour(DataModel, RefinementGroup, Storable):
    """
    Interface class for coding in-situ behaviour scripts.
    Sub-classes should override or implement the methods below.
    """

    # MODEL INTEL:
    class Meta(DataModel.Meta):
        store_id = "InSituBehaviour" # Override this so it is a unique string
        concrete = False # Indicates this cannot be instantiated and added in the UI

    # Alias: a behaviour's parent in the model tree is the mixture it belongs
    # to (delegates straight to DataModel.parent's getter and setter).
    mixture = property(DataModel.parent.fget, DataModel.parent.fset)

    # REFINEMENT GROUP IMPLEMENTATION:
    @property
    def refine_title(self):
        # Display label for this refinement group.
        return "In-situ behaviour"

    @property
    def refine_descriptor_data(self):
        # NOTE(review): reads self.phase, which is never assigned in this base
        # class — presumably set by a subclass or caller; confirm before use.
        return dict(
            phase_name=self.phase.refine_title,
            component_name="*"
        )

    #: The name of this Behaviour
    name = StringProperty(
        default="New Behaviour", text="Name",
        visible=True, persistent=True, tabular=True
    )

    # ------------------------------------------------------------
    #      Initialization and other internals
    # ------------------------------------------------------------
    def __init__(self, *args, **kwargs):
        # Pop the kwargs that belong to this class's own persistent
        # properties before handing the rest to the base constructor.
        my_kwargs = self.pop_kwargs(kwargs,
            *[prop.label for prop in InSituBehaviour.Meta.get_local_persistent_properties()]
        )
        super(InSituBehaviour, self).__init__(*args, **kwargs)
        kwargs = my_kwargs

        # Batch property assignment inside a single data_changed notification.
        with self.data_changed.hold():
            self.name = self.get_kwarg(kwargs, self.name, "name")

        pass #end of constructor

    # ------------------------------------------------------------
    #      Methods & Functions
    # ------------------------------------------------------------
    def apply(self, phase):
        # NOTE(review): assert is stripped under -O; these checks vanish in
        # optimized runs — consider explicit raises if they must always hold.
        assert phase is not None, "Cannot apply on None"
        assert self.is_compatible_with(phase), "`%r` is not compatible with phase `%r`" % (self, phase)

    def is_compatible_with(self, phase):
        return False # sub classes need to override this

    pass #end of class
| 34.402778
| 103
| 0.583771
| 264
| 2,477
| 5.371212
| 0.507576
| 0.025388
| 0.022567
| 0.028209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003721
| 0.240614
| 2,477
| 72
| 104
| 34.402778
| 0.750133
| 0.32176
| 0
| 0.105263
| 0
| 0
| 0.068543
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 1
| 0.131579
| false
| 0.052632
| 0.105263
| 0.078947
| 0.421053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
1665579643c424a545b6a8b3af94a1a9e0f4f184
| 357
|
py
|
Python
|
examples/remove_comments.py
|
igordejanovic/textx-bibtex
|
b1374a39b96da9c1bc979c367b9ed3feb04f4f01
|
[
"MIT"
] | 1
|
2020-06-17T21:51:33.000Z
|
2020-06-17T21:51:33.000Z
|
examples/remove_comments.py
|
igordejanovic/textx-bibtex
|
b1374a39b96da9c1bc979c367b9ed3feb04f4f01
|
[
"MIT"
] | null | null | null |
examples/remove_comments.py
|
igordejanovic/textx-bibtex
|
b1374a39b96da9c1bc979c367b9ed3feb04f4f01
|
[
"MIT"
] | null | null | null |
"""
Remove comments from bib file.
"""
from textx import metamodel_for_language
from txbibtex import bibentry_str
BIB_FILE = 'references.bib'

# Parse the bib file using the registered bibtex language metamodel.
bibfile = metamodel_for_language('bibtex').model_from_file(BIB_FILE)

# Emit every entry except line comments, one rendered entry per line.
kept_entries = (bibentry_str(entry) for entry in bibfile.entries
                if entry.__class__.__name__ != 'BibLineComment')
print('\n'.join(kept_entries))
| 27.461538
| 68
| 0.739496
| 49
| 357
| 5.020408
| 0.591837
| 0.085366
| 0.162602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148459
| 357
| 12
| 69
| 29.75
| 0.809211
| 0.142857
| 0
| 0
| 0
| 0
| 0.120805
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
16693286bda8fc5cb36e02f9aa7765ff20fcfe4e
| 7,066
|
py
|
Python
|
tests/unit/utils/test_validators.py
|
kajusK/HiddenPlaces
|
aa976f611a419bc33f8a65f0314956ec09fe2bfd
|
[
"MIT"
] | null | null | null |
tests/unit/utils/test_validators.py
|
kajusK/HiddenPlaces
|
aa976f611a419bc33f8a65f0314956ec09fe2bfd
|
[
"MIT"
] | null | null | null |
tests/unit/utils/test_validators.py
|
kajusK/HiddenPlaces
|
aa976f611a419bc33f8a65f0314956ec09fe2bfd
|
[
"MIT"
] | null | null | null |
"""Unit tests for app.validators. """
from wtforms import ValidationError
import flask
from pytest import raises
from app.utils.validators import password_rules, image_file, allowed_file
class DummyField(object):
    """Minimal stand-in for a wtforms field.

    Carries only what the validators touch: data, errors, raw_data,
    plus identity translation hooks.
    """

    def __init__(self, data=None, errors=(), raw_data=None):
        self.data = data
        # Materialise into a fresh list so callers may append errors freely.
        self.errors = [error for error in errors]
        self.raw_data = raw_data

    def gettext(self, string):
        """Return *string* untranslated."""
        return string

    def ngettext(self, singular, plural, n):
        """Return the singular form regardless of *n*."""
        return singular
class DummyForm(dict):
    """Dummy form object to emulate wtforms form."""
    # Behaves as a plain dict; the validators only need something form-shaped.
    pass
class DummyFile(object):
    """Minimal stand-in for an uploaded-file handler: just a filename."""

    def __init__(self, filename):
        self.filename = filename

    def __repr__(self):
        # Show the bare filename so failing subtests read naturally.
        return self.filename
def _run_validator_check(subtests, validator, valid, invalid):
    """Exercise *validator* against accepted and rejected inputs.

    Args:
        subtests: Subtests fixture.
        validator: Validator instance to run tests against.
        valid: Inputs the validator must accept without raising.
        invalid: Inputs the validator must reject with ValidationError.
    """
    field = DummyField()
    for candidate in valid:
        field.data = candidate
        with subtests.test(item=candidate):
            # Accepted input: the call must complete without raising.
            validator(DummyForm(), field)
    for candidate in invalid:
        field.data = candidate
        with subtests.test(item=candidate):
            with raises(ValidationError):
                validator(DummyForm(), field)
def test_allowed_file(subtests, req_context):
    """allowed_file rejects filenames whose final extension is disabled."""
    validator = allowed_file()
    good_names = ['foo.jpg', 'exe', 'foo.exe.zip', 'foo']
    bad_names = ['foo.exe', 'foo.EXE', 'foo.pdf.exe', 'foo.html']
    valid = [DummyFile(name) for name in good_names]
    invalid = [DummyFile(name) for name in bad_names]
    flask.current_app.config['DISABLED_EXTENSIONS'] = ['exe', 'html']
    with flask.current_app.test_request_context():
        _run_validator_check(subtests, validator, valid, invalid)
def test_allowed_file_multiple(subtests, req_context):
    """allowed_file also validates lists of uploaded files."""
    validator = allowed_file()
    good_names = ['foo.jpg', 'exe', 'foo.exe.zip', 'foo']
    bad_names = ['foo.exe', 'foo.EXE', 'foo.pdf.exe', 'foo.html']
    valid = [
        [DummyFile(name) for name in good_names],
        [DummyFile(good_names[0])],
        [DummyFile(good_names[0]), DummyFile(good_names[1])],
    ]
    invalid = [
        [DummyFile(name) for name in bad_names],
        [DummyFile(bad_names[0])],
        [DummyFile(bad_names[0]), DummyFile(bad_names[1])],
    ]
    flask.current_app.config['DISABLED_EXTENSIONS'] = ['exe', 'html']
    with flask.current_app.test_request_context():
        _run_validator_check(subtests, validator, valid, invalid)
def test_allowed_file_message(req_context):
    """A custom message overrides the default rejection text."""
    validator = allowed_file(message="custom message")
    field = DummyField(data=DummyFile("blah.foo"))
    flask.current_app.config['DISABLED_EXTENSIONS'] = ['foo']
    with flask.current_app.test_request_context():
        with raises(ValidationError) as excinfo:
            validator(DummyForm(), field)
        assert str(excinfo.value) == "custom message"
def test_image_file(subtests, req_context):
    """image_file accepts only the configured image extensions."""
    validator = image_file()
    good_names = ['foo.jpg', 'foo.JPG', 'bar.png', 'blah.tiff', 'a.foo.jpg']
    bad_names = ['foo', 'jpg', 'foo.pdf', 'foo.jpg.pdf', '', '.jpg', 'o.gif']
    valid = [DummyFile(name) for name in good_names]
    invalid = [DummyFile(name) for name in bad_names]
    flask.current_app.config['IMAGE_EXTENSIONS'] = ['jpg', 'png', 'tiff']
    with flask.current_app.test_request_context():
        _run_validator_check(subtests, validator, valid, invalid)
def test_image_file_multiple(subtests, req_context):
    """Multi-file uploads fail when any member is not an image file."""
    validator = image_file()
    ok_names = ['foo.jpg', 'foo.JPG', 'bar.png', 'blah.tiff', 'a.foo.jpg']
    bad_names = ['foo', 'jpg', 'foo.pdf', 'foo.jpg.pdf', '', '.jpg', 'o.gif']
    accepted = [
        [DummyFile(n) for n in ok_names],
        [DummyFile(ok_names[0])],
        [DummyFile(ok_names[0]), DummyFile(ok_names[1])],
    ]
    rejected = [
        [DummyFile(n) for n in bad_names],
        [DummyFile(bad_names[0])],
        [DummyFile(bad_names[0]), DummyFile(bad_names[1])],
    ]
    flask.current_app.config['IMAGE_EXTENSIONS'] = ['jpg', 'png', 'tiff']
    with flask.current_app.test_request_context():
        _run_validator_check(subtests, validator, accepted, rejected)
def test_image_file_message(req_context):
    """A custom error message must be propagated on validation failure."""
    validator = image_file(message="custom message")
    field = DummyField()
    field.data = DummyFile("blah")
    flask.current_app.config['IMAGE_EXTENSIONS'] = ['foo']
    with flask.current_app.test_request_context():
        with raises(ValidationError) as excinfo:
            validator(DummyForm(), field)
        assert str(excinfo.value) == "custom message"
def test_password_rules_length(subtests):
    """With all character-class rules off, only the length rule applies."""
    validator = password_rules(
        length=6, upper=None, lower=None, numeric=None, special=None)
    long_enough = ["as123.21", "abcdef", "sdadadaswasasa", "1234567",
                   "...,.,..,", "AAAAAAA", "AbCdEf"]
    too_short = ["abc", "123", "....", "aBcDe", "a1.V3"]
    _run_validator_check(subtests, validator, long_enough, too_short)
def test_password_rules_upper(subtests):
    """Passwords must contain at least two uppercase characters."""
    validator = password_rules(
        length=6, upper=2, lower=None, numeric=None, special=None)
    good = ["abcDEf", "HellOO", "ABCDEZ", "A.b#3CZ", "ADSDSA"]
    bad = ["abcdEf", "helloo", "A231sdsd"]
    _run_validator_check(subtests, validator, good, bad)
def test_password_rules_lower(subtests):
    """Passwords must contain at least three lowercase characters."""
    validator = password_rules(
        length=6, upper=None, lower=3, numeric=None, special=None)
    good = ["abcdefg", "axzBAR", "123abcdsa", "AbCdEfGh", "..as..2ds.."]
    bad = ["foOBAR", "123ABcdSA", "1a2b.C#"]
    _run_validator_check(subtests, validator, good, bad)
def test_password_rules_numeric(subtests):
    """Passwords must contain at least two numeric characters."""
    validator = password_rules(
        length=6, upper=None, lower=None, numeric=2, special=None)
    good = ["1bcd4A.d", "123456", "a?9#.0"]
    bad = ["2ds.#<", "abcdef", "ABCDEF", "x2U.'Q"]
    _run_validator_check(subtests, validator, good, bad)
def test_password_rules_special(subtests):
    """Passwords must contain at least three special characters."""
    validator = password_rules(
        length=6, upper=None, lower=None, numeric=None, special=3)
    good = ["ab.?123!", ".#@dS9", "abcdef123><?"]
    bad = ["abcdef", ".23134", "AbCd123,]"]
    _run_validator_check(subtests, validator, good, bad)
def test_password_rules_all(subtests):
    """All rules active at once: length, upper, lower, numeric, special."""
    validator = password_rules(
        length=6, upper=2, lower=1, numeric=1, special=1)
    good = ["ABc1.2", "abcDEF123#%^", "a2B.C?"]
    bad = ["helloo", "ABCDEF", "Ab1.?c"]
    _run_validator_check(subtests, validator, good, bad)
def test_password_rules_message(subtests):
    """A custom error message must be propagated on validation failure."""
    validator = password_rules(length=100, message="custom message")
    field = DummyField()
    field.data = "wrong"
    with raises(ValidationError) as excinfo:
        validator(DummyForm(), field)
    assert str(excinfo.value) == "custom message"
| 35.686869
| 78
| 0.644495
| 849
| 7,066
| 5.202591
| 0.177856
| 0.069278
| 0.040752
| 0.062259
| 0.709079
| 0.678515
| 0.662441
| 0.633009
| 0.616255
| 0.582975
| 0
| 0.01664
| 0.209029
| 7,066
| 197
| 79
| 35.86802
| 0.773663
| 0.055618
| 0
| 0.532847
| 0
| 0
| 0.125057
| 0
| 0
| 0
| 0
| 0
| 0.021898
| 1
| 0.138686
| false
| 0.116788
| 0.029197
| 0.021898
| 0.211679
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
166b671e9115e476c69bab6e6077599dd6b6cdea
| 5,434
|
py
|
Python
|
tests/authorization/test_searches.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
tests/authorization/test_searches.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
tests/authorization/test_searches.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Unit tests of authorization searches."""
import pytest
from ..utilities.general import is_never_authz, is_no_authz, uses_cataloging, uses_filesystem_only
from dlkit.abstract_osid.osid import errors
from dlkit.primordium.id.primitives import Id
from dlkit.primordium.type.primitives import Type
from dlkit.runtime import PROXY_SESSION, proxy_example
from dlkit.runtime.managers import Runtime
# Build a module-level PROXY from a simulated HTTP request; every service
# manager created by the fixtures below authenticates through it.
REQUEST = proxy_example.SimpleRequest()
CONDITION = PROXY_SESSION.get_proxy_condition()
CONDITION.set_http_request(REQUEST)
PROXY = PROXY_SESSION.get_proxy(CONDITION)
# Generic all-'DEFAULT' Type primitive available to search tests.
DEFAULT_TYPE = Type(**{'identifier': 'DEFAULT', 'namespace': 'DEFAULT', 'authority': 'DEFAULT'})
@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def authorization_search_class_fixture(request):
    """Class-scoped fixture: create one test Vault per service configuration."""
    # From test_templates/resource.py::ResourceSearch::init_template
    cls = request.cls
    cls.service_config = request.param
    cls.svc_mgr = Runtime().get_service_manager(
        'AUTHORIZATION',
        proxy=PROXY,
        implementation=cls.service_config)
    form = cls.svc_mgr.get_vault_form_for_create([])
    form.display_name = 'Test catalog'
    form.description = 'Test catalog description'
    cls.catalog = cls.svc_mgr.create_vault(form)

    def class_tear_down():
        # Remove the vault created above once the test class finishes.
        cls.svc_mgr.delete_vault(cls.catalog.ident)

    request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def authorization_search_test_fixture(request):
    """Attach a fresh AuthorizationSearch to the test class before each test."""
    # From test_templates/resource.py::ResourceSearch::init_template
    catalog = request.cls.catalog
    request.cls.search = catalog.get_authorization_search()
@pytest.mark.usefixtures("authorization_search_class_fixture", "authorization_search_test_fixture")
class TestAuthorizationSearch(object):
    """Tests for AuthorizationSearch"""

    # NOTE(review): the tests below are generated placeholders; each remains
    # skipped until the corresponding AuthorizationSearch behavior is implemented.
    @pytest.mark.skip('unimplemented test')
    def test_search_among_authorizations(self):
        """Tests search_among_authorizations"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_order_authorization_results(self):
        """Tests order_authorization_results"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_get_authorization_search_record(self):
        """Tests get_authorization_search_record"""
        pass
@pytest.mark.usefixtures("authorization_search_results_class_fixture", "authorization_search_results_test_fixture")
class TestAuthorizationSearchResults(object):
    """Tests for AuthorizationSearchResults"""

    # NOTE(review): the referenced fixtures are not defined in this module —
    # presumably provided by a shared conftest; confirm before enabling tests.
    @pytest.mark.skip('unimplemented test')
    def test_get_authorizations(self):
        """Tests get_authorizations"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_get_authorization_query_inspector(self):
        """Tests get_authorization_query_inspector"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_get_authorization_search_results_record(self):
        """Tests get_authorization_search_results_record"""
        pass
@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def vault_search_class_fixture(request):
    """Class-scoped fixture: create one test Vault per service configuration."""
    # From test_templates/resource.py::ResourceSearch::init_template
    cls = request.cls
    cls.service_config = request.param
    cls.svc_mgr = Runtime().get_service_manager(
        'AUTHORIZATION',
        proxy=PROXY,
        implementation=cls.service_config)
    form = cls.svc_mgr.get_vault_form_for_create([])
    form.display_name = 'Test catalog'
    form.description = 'Test catalog description'
    cls.catalog = cls.svc_mgr.create_vault(form)

    def class_tear_down():
        # Remove the vault created above once the test class finishes.
        cls.svc_mgr.delete_vault(cls.catalog.ident)

    request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def vault_search_test_fixture(request):
    """Attach a fresh VaultSearch to the test class before each test."""
    # From test_templates/resource.py::ResourceSearch::init_template
    catalog = request.cls.catalog
    request.cls.search = catalog.get_vault_search()
@pytest.mark.usefixtures("vault_search_class_fixture", "vault_search_test_fixture")
class TestVaultSearch(object):
    """Tests for VaultSearch"""

    # NOTE(review): the tests below are generated placeholders; each remains
    # skipped until the corresponding VaultSearch behavior is implemented.
    @pytest.mark.skip('unimplemented test')
    def test_search_among_vaults(self):
        """Tests search_among_vaults"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_order_vault_results(self):
        """Tests order_vault_results"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_get_vault_search_record(self):
        """Tests get_vault_search_record"""
        pass
@pytest.mark.usefixtures("vault_search_results_class_fixture", "vault_search_results_test_fixture")
class TestVaultSearchResults(object):
    """Tests for VaultSearchResults"""

    # NOTE(review): the referenced fixtures are not defined in this module —
    # presumably provided by a shared conftest; confirm before enabling tests.
    @pytest.mark.skip('unimplemented test')
    def test_get_vaults(self):
        """Tests get_vaults"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_get_vault_query_inspector(self):
        """Tests get_vault_query_inspector"""
        pass

    @pytest.mark.skip('unimplemented test')
    def test_get_vault_search_results_record(self):
        """Tests get_vault_search_results_record"""
        pass
| 36.469799
| 176
| 0.749724
| 643
| 5,434
| 6.001555
| 0.157076
| 0.051827
| 0.043535
| 0.08396
| 0.714693
| 0.637989
| 0.584089
| 0.584089
| 0.56284
| 0.502721
| 0
| 0
| 0.144645
| 5,434
| 148
| 177
| 36.716216
| 0.83025
| 0.146117
| 0
| 0.586957
| 0
| 0
| 0.200615
| 0.109866
| 0
| 0
| 0
| 0
| 0
| 1
| 0.195652
| false
| 0.130435
| 0.076087
| 0
| 0.315217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
166ed868a00e2876de6024b3dcf661e7d6afc455
| 216
|
py
|
Python
|
OOP_MiniQuiz/run_car_Level2.py
|
HelloYeew/helloyeew-lab-computer-programming-i
|
60b05072f32f23bab4a336b506ba7f66e52c045d
|
[
"MIT"
] | null | null | null |
OOP_MiniQuiz/run_car_Level2.py
|
HelloYeew/helloyeew-lab-computer-programming-i
|
60b05072f32f23bab4a336b506ba7f66e52c045d
|
[
"MIT"
] | null | null | null |
OOP_MiniQuiz/run_car_Level2.py
|
HelloYeew/helloyeew-lab-computer-programming-i
|
60b05072f32f23bab4a336b506ba7f66e52c045d
|
[
"MIT"
] | null | null | null |
from car import *
def compare(car1, car2):
    """Print the two cars' descriptions, one per line."""
    for car in (car1, car2):
        print(car)
# Sample fleet: Car(make, model, price).
car1 = Car("Nissan","Tiida",450000)
car2 = Car("Toyota","Vios",400000)
car3 = Car("BMW","X3",3400000)
# Print each pair of cars for visual comparison.
compare(car3,car1)
compare(car1,car2)
| 18
| 35
| 0.671296
| 32
| 216
| 4.53125
| 0.5625
| 0.151724
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165775
| 0.134259
| 216
| 12
| 36
| 18
| 0.609626
| 0
| 0
| 0
| 0
| 0
| 0.119816
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.222222
| 0.222222
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16725a52de27142aa18864c727dddea44204b666
| 5,940
|
py
|
Python
|
beartype/vale/__init__.py
|
posita/beartype
|
e56399686e1f2ffd5128a4030b19314504e32450
|
[
"MIT"
] | null | null | null |
beartype/vale/__init__.py
|
posita/beartype
|
e56399686e1f2ffd5128a4030b19314504e32450
|
[
"MIT"
] | null | null | null |
beartype/vale/__init__.py
|
posita/beartype
|
e56399686e1f2ffd5128a4030b19314504e32450
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2021 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype validators.**
This submodule publishes a PEP-compliant hierarchy of subscriptable (indexable)
classes enabling callers to validate the internal structure of arbitrarily
complex scalars, data structures, and third-party objects. Like annotation
objects defined by the :mod:`typing` module (e.g., :attr:`typing.Union`), these
classes dynamically generate PEP-compliant type hints when subscripted
(indexed) and are thus intended to annotate callables and variables. Unlike
annotation objects defined by the :mod:`typing` module, these classes are *not*
explicitly covered by existing PEPs and thus *not* directly usable as
annotations.
Instead, callers are expected to (in order):
#. Annotate callable parameters and returns to be validated with
:pep:`593`-compliant :attr:`typing.Annotated` type hints.
#. Subscript those hints with (in order):
#. The type of those parameters and returns.
#. One or more subscriptions of classes declared by this submodule.
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype.vale._is._valeis import _IsFactory
from beartype.vale._is._valeistype import (
_IsInstanceFactory,
_IsSubclassFactory,
)
from beartype.vale._is._valeisobj import _IsAttrFactory
from beartype.vale._is._valeisoper import _IsEqualFactory
# ....................{ SINGLETONS                        }....................
# Public factory singletons instantiating these private factory classes.
# Per the module docstring above, callers subscript these (e.g., "Is[...]")
# to dynamically generate PEP-compliant type hints for typing.Annotated.
Is = _IsFactory(basename='Is')
IsAttr = _IsAttrFactory(basename='IsAttr')
IsEqual = _IsEqualFactory(basename='IsEqual')
IsInstance = _IsInstanceFactory(basename='IsInstance')
IsSubclass = _IsSubclassFactory(basename='IsSubclass')

# Delete all private factory classes imported above for safety.
# NOTE(review): each "basename" mirrors the public name bound above —
# presumably used for readable reprs/errors; keep the two in sync.
del (
    _IsFactory,
    _IsAttrFactory,
    _IsEqualFactory,
    _IsInstanceFactory,
    _IsSubclassFactory,
)
# ....................{ TODO }....................
#FIXME: As intelligently requested by @Saphyel at #32, add support for
#additional classes supporting constraints resembling:
#
#* String constraints:
# * Email.
# * Uuid.
# * Choice.
# * Language.
# * Locale.
# * Country.
# * Currency.
#* Comparison constraints
# * IdenticalTo.
# * NotIdenticalTo.
# * LessThan.
# * GreaterThan.
# * Range.
# * DivisibleBy.
#FIXME: Add a new BeartypeValidator.get_cause_or_none() method with the same
#signature and docstring as the existing CauseSleuth.get_cause_or_none()
#method. This new BeartypeValidator.get_cause_or_none() method should then be
#called by the "_peperrorannotated" submodule to generate human-readable
#exception messages. Note that this implies that:
#* The BeartypeValidator.__init__() method will need to additionally accept a new
# mandatory "get_cause_or_none: Callable[[], Optional[str]]" parameter, which
# that method should then localize to "self.get_cause_or_none".
#* Each __class_getitem__() dunder method of each "_BeartypeValidatorFactoryABC" subclass will need
# to additionally define and pass that callable when creating and returning
# its "BeartypeValidator" instance.
#FIXME: *BRILLIANT IDEA.* Holyshitballstime. The idea here is that we can
#leverage all of our existing "beartype.is" infrastructure to dynamically
#synthesize PEP-compliant type hints that would then be implicitly supported by
#any runtime type checker. At present, subscriptions of "Is" (e.g.,
#"Annotated[str, Is[lambda text: bool(text)]]") are only supported by beartype
#itself. Of course, does anyone care? I mean, if you're using a runtime type
#checker, you're probably *ONLY* using beartype. Right? That said, this would
#technically improve portability by allowing users to switch between different
#checkers... except not really, since they'd still have to import beartype
#infrastructure to do so. So, this is probably actually useless.
#
#Nonetheless, the idea itself is trivial. We declare a new
#"beartype.is.Portable" singleton accessed in the same way: e.g.,
# from beartype import beartype
# from beartype.is import Portable
# NonEmptyStringTest = Is[lambda text: bool(text)]
# NonEmptyString = Portable[str, NonEmptyStringTest]
# @beartype
# def munge_it(text: NonEmptyString) -> str: ...
#
#So what's the difference between "typing.Annotated" and "beartype.is.Portable"
#then? Simple. The latter dynamically generates one new PEP 3119-compliant
#metaclass and associated class whenever subscripted. Clearly, this gets
#expensive in both space and time consumption fast -- which is why this won't
#be the default approach. For safety, this new class does *NOT* subclass the
#first subscripted class. Instead:
#* This new metaclass of this new class simply defines an __isinstancecheck__()
# dunder method. For the above example, this would be:
# class NonEmptyStringMetaclass(object):
# def __isinstancecheck__(cls, obj) -> bool:
# return isinstance(obj, str) and NonEmptyStringTest(obj)
#* This new class would then be entirely empty. For the above example, this
# would be:
# class NonEmptyStringClass(object, metaclass=NonEmptyStringMetaclass):
# pass
#
#Well, so much for brilliant. It's slow and big, so it seems doubtful anyone
#would actually do that. Nonetheless, that's food for thought for you.
| 45.343511
| 99
| 0.711616
| 714
| 5,940
| 5.837535
| 0.457983
| 0.017274
| 0.011996
| 0.016795
| 0.071017
| 0.056622
| 0.056622
| 0.037428
| 0
| 0
| 0
| 0.003598
| 0.157744
| 5,940
| 130
| 100
| 45.692308
| 0.829502
| 0.868687
| 0
| 0.210526
| 0
| 0
| 0.050725
| 0
| 0
| 0
| 0
| 0.007692
| 0
| 1
| 0
| false
| 0
| 0.210526
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16848dd03e02c952cce813e4092be02064f38ca9
| 1,470
|
py
|
Python
|
githubdl/url_helpers.py
|
wilvk/githubdl
|
1dc8c1c0d93a8e4b8155aecf4f5e73e2931ed920
|
[
"MIT"
] | 16
|
2018-06-20T00:01:40.000Z
|
2022-01-24T08:16:17.000Z
|
githubdl/url_helpers.py
|
wilvk/githubdl
|
1dc8c1c0d93a8e4b8155aecf4f5e73e2931ed920
|
[
"MIT"
] | 12
|
2018-07-18T21:09:37.000Z
|
2020-03-28T23:38:13.000Z
|
githubdl/url_helpers.py
|
wilvk/githubdl
|
1dc8c1c0d93a8e4b8155aecf4f5e73e2931ed920
|
[
"MIT"
] | null | null | null |
import re
from urllib.parse import urlparse
import logging
def check_url_is_http(repo_url):
    """Return True if *repo_url* uses the http or https scheme.

    :param repo_url: repository URL string to test.
    :return: bool -- True for "http://..." or "https://..." URLs.
    """
    # re.match anchors at the start, replacing the original compile/search
    # pair and the unidiomatic "False if match is None else True".
    return re.match(r'^https?://.*$', repo_url) is not None
def check_url_is_ssh(repo_url):
    """Return True if *repo_url* looks like an SSH clone URL ("git@...*.git").

    :param repo_url: repository URL string to test.
    :return: bool
    """
    # Same pattern as before; the boolean is returned directly instead of
    # via "False if match is None else True".
    return re.match(r'^git\@.*\.git$', repo_url) is not None
def get_domain_name_from_http_url(repo_url):
    """Return the network-location (host[:port]) part of an HTTP(S) URL."""
    return urlparse(repo_url).netloc
def get_repo_name_from_http_url(repo_url):
    """Return the repository name from an HTTP(S) URL.

    Strips a trailing ".git" suffix and a single leading "/" from the URL
    path, e.g. "https://github.com/user/repo.git" -> "user/repo".

    Fixes an IndexError in the original when the URL has an empty path
    (e.g. "https://github.com"); an empty string is now returned instead.
    """
    path = re.sub(r'\.git$', '', urlparse(repo_url).path)
    # Strip at most one leading slash, preserving the original behavior for
    # unusual paths that begin with several slashes.
    if path.startswith('/'):
        return path[1:]
    return path
def get_repo_name_from_ssh_url(repo_url):
    """Return the repo name from an SSH URL: the text between the first ':'
    and the last '.' (e.g. "git@host:user/repo.git" -> "user/repo")."""
    found = re.search(r'(?<=\:)(.*)(?=\.)', repo_url)
    return found.group()
def get_domain_name_from_ssh_url(repo_url):
    """Return the host from an SSH URL: the text between '@' and the last ':'
    (e.g. "git@github.com:user/repo.git" -> "github.com")."""
    found = re.search(r'(?<=\@)(.*)(?=\:)', repo_url)
    return found.group()
def validate_protocol_exists(is_ssh, is_http):
    """Raise RuntimeError (after logging it) unless at least one protocol
    flag is set."""
    if is_ssh or is_http:
        return
    message = "Error: repository url provided is not http(s) or ssh"
    logging.critical(message)
    raise RuntimeError(message)
def check_url_protocol(repo_url):
    """Classify *repo_url* and return an (is_ssh, is_http) tuple, raising
    RuntimeError when it matches neither protocol."""
    protocols = (check_url_is_ssh(repo_url), check_url_is_http(repo_url))
    validate_protocol_exists(*protocols)
    return protocols
| 31.276596
| 76
| 0.706803
| 223
| 1,470
| 4.327354
| 0.246637
| 0.108808
| 0.067358
| 0.074611
| 0.635233
| 0.577202
| 0.472539
| 0.404145
| 0.404145
| 0.404145
| 0
| 0.001643
| 0.172109
| 1,470
| 46
| 77
| 31.956522
| 0.79129
| 0
| 0
| 0.263158
| 0
| 0
| 0.081633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.078947
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
168cde4a792e9985c473078c1d3e1678761198e7
| 4,873
|
py
|
Python
|
homeassistant/components/media_player/pjlink.py
|
dauden1184/home-assistant
|
f4c6d389b77d0efa86644e76604eaea5d21abdb5
|
[
"Apache-2.0"
] | 4
|
2019-01-10T14:47:54.000Z
|
2021-04-22T02:06:27.000Z
|
homeassistant/components/media_player/pjlink.py
|
dauden1184/home-assistant
|
f4c6d389b77d0efa86644e76604eaea5d21abdb5
|
[
"Apache-2.0"
] | 6
|
2021-02-08T21:02:40.000Z
|
2022-03-12T00:52:16.000Z
|
homeassistant/components/media_player/pjlink.py
|
dauden1184/home-assistant
|
f4c6d389b77d0efa86644e76604eaea5d21abdb5
|
[
"Apache-2.0"
] | 3
|
2018-08-29T19:26:20.000Z
|
2020-01-19T11:58:22.000Z
|
"""
Support for controlling projector via the PJLink protocol.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.pjlink/
"""
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
PLATFORM_SCHEMA, SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF, SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE, MediaPlayerDevice)
from homeassistant.const import (
CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, STATE_OFF, STATE_ON)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pypjlink2==1.2.0']

_LOGGER = logging.getLogger(__name__)

# Platform-specific configuration key for the projector's text encoding.
CONF_ENCODING = 'encoding'

# NOTE(review): 4352 is presumably the standard PJLink TCP port — confirm
# against the PJLink specification.
DEFAULT_PORT = 4352
DEFAULT_ENCODING = 'utf-8'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_HOST): cv.string,
    vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    vol.Optional(CONF_NAME): cv.string,
    vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
    vol.Optional(CONF_PASSWORD): cv.string,
})

# Feature bitmask advertised by every PJLink media player entity.
SUPPORT_PJLINK = SUPPORT_VOLUME_MUTE | \
    SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the PJLink platform."""
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    name = config.get(CONF_NAME)
    encoding = config.get(CONF_ENCODING)
    password = config.get(CONF_PASSWORD)

    # Registry of already-added devices, keyed by "host:port".
    known_devices = hass.data.setdefault('pjlink', {})
    device_label = "{}:{}".format(host, port)
    if device_label in known_devices:
        # Duplicate configuration entry for the same projector — skip it.
        return

    device = PjLinkDevice(host, port, name, encoding, password)
    known_devices[device_label] = device
    add_entities([device], True)
def format_input_source(input_source_name, input_source_number):
    """Format input source for display in UI as "<name> <number>"."""
    return "%s %s" % (input_source_name, input_source_number)
class PjLinkDevice(MediaPlayerDevice):
    """Representation of a PJLink device."""

    def __init__(self, host, port, name, encoding, password):
        """Initialize the PJLink device."""
        self._host = host
        self._port = port
        self._name = name
        self._password = password
        self._encoding = encoding
        self._muted = False
        self._pwstate = STATE_OFF
        self._current_source = None
        with self.projector() as projector:
            # Fall back to the projector's self-reported name when the user
            # did not configure one.
            if not self._name:
                self._name = projector.get_name()
            inputs = projector.get_inputs()
            # Map "name number" display labels back to the raw input tuples
            # expected by select_source().
            self._source_name_mapping = \
                {format_input_source(*x): x for x in inputs}
            self._source_list = sorted(self._source_name_mapping.keys())

    def projector(self):
        """Create PJLink Projector instance."""
        # Imported lazily so the pypjlink dependency is only needed at runtime.
        from pypjlink import Projector
        projector = Projector.from_address(
            self._host, self._port, self._encoding)
        projector.authenticate(self._password)
        return projector

    def update(self):
        """Get the latest state from the device."""
        with self.projector() as projector:
            pwstate = projector.get_power()
            if pwstate == 'off':
                self._pwstate = STATE_OFF
            else:
                # NOTE(review): any non-'off' power report maps to on —
                # presumably this also covers warm-up/cool-down states.
                self._pwstate = STATE_ON
            # NOTE(review): get_mute() returns a pair; index 1 appears to be
            # the audio mute flag (cf. MUTE_AUDIO in mute_volume below).
            self._muted = projector.get_mute()[1]
            self._current_source = \
                format_input_source(*projector.get_input())

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def state(self):
        """Return the state of the device."""
        return self._pwstate

    @property
    def is_volume_muted(self):
        """Return boolean indicating mute status."""
        return self._muted

    @property
    def source(self):
        """Return current input source."""
        return self._current_source

    @property
    def source_list(self):
        """Return all available input sources."""
        return self._source_list

    @property
    def supported_features(self):
        """Return projector supported features."""
        return SUPPORT_PJLINK

    def turn_off(self):
        """Turn projector off."""
        with self.projector() as projector:
            projector.set_power('off')

    def turn_on(self):
        """Turn projector on."""
        with self.projector() as projector:
            projector.set_power('on')

    def mute_volume(self, mute):
        """Mute (true) of unmute (false) media player."""
        with self.projector() as projector:
            from pypjlink import MUTE_AUDIO
            projector.set_mute(MUTE_AUDIO, mute)

    def select_source(self, source):
        """Set the input source."""
        # Translate the UI label back to the raw (name, number) input tuple.
        source = self._source_name_mapping[source]
        with self.projector() as projector:
            projector.set_input(*source)
| 31.038217
| 78
| 0.65668
| 580
| 4,873
| 5.277586
| 0.227586
| 0.03953
| 0.033322
| 0.037243
| 0.159752
| 0.063378
| 0.04247
| 0.029402
| 0
| 0
| 0
| 0.002704
| 0.241125
| 4,873
| 156
| 79
| 31.237179
| 0.825041
| 0.14488
| 0
| 0.134615
| 0
| 0
| 0.015935
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.144231
| false
| 0.067308
| 0.067308
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
168f0267685e958dd990feeae60a1086e6b78107
| 31,038
|
py
|
Python
|
pxr/usd/usdGeom/testenv/testUsdGeomSchemata.py
|
yurivict/USD
|
3b097e3ba8fabf1777a1256e241ea15df83f3065
|
[
"Apache-2.0"
] | 1
|
2022-03-16T01:40:10.000Z
|
2022-03-16T01:40:10.000Z
|
pxr/usd/usdGeom/testenv/testUsdGeomSchemata.py
|
yurivict/USD
|
3b097e3ba8fabf1777a1256e241ea15df83f3065
|
[
"Apache-2.0"
] | null | null | null |
pxr/usd/usdGeom/testenv/testUsdGeomSchemata.py
|
yurivict/USD
|
3b097e3ba8fabf1777a1256e241ea15df83f3065
|
[
"Apache-2.0"
] | 1
|
2018-10-03T19:08:33.000Z
|
2018-10-03T19:08:33.000Z
|
#!/pxrpythonsubst
#
# Copyright 2017 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
# pylint: disable=map-builtin-not-iterating
import sys, unittest
from pxr import Sdf, Usd, UsdGeom, Vt, Gf, Tf
class TestUsdGeomSchemata(unittest.TestCase):
    def test_Basic(self):
        """Exercise basic Mesh schema construction and attribute access."""
        l = Sdf.Layer.CreateAnonymous()
        stage = Usd.Stage.Open(l.identifier)
        p = stage.DefinePrim("/Mesh", "Mesh")
        self.assertTrue(p)
        mesh = UsdGeom.Mesh(p)
        self.assertTrue(mesh)
        self.assertTrue(mesh.GetPrim())
        # No points were authored, so sampling at time 1 yields nothing.
        self.assertTrue(not mesh.GetPointsAttr().Get(1))
        # The prim's type name must round-trip through the schema registry.
        self.assertEqual(p.GetTypeName(),
                         Usd.SchemaRegistry().GetSchemaTypeName(mesh._GetStaticTfType()))
        #
        # Make sure uniform access behaves as expected.
        #
        ori = p.GetAttribute("orientation")
        # The generic orientation attribute should be automatically defined because
        # it is a registered attribute of a well known schema. However, it's not
        # yet authored at the current edit target.
        self.assertTrue(ori.IsDefined())
        self.assertTrue(not ori.IsAuthoredAt(ori.GetStage().GetEditTarget()))
        # Author a value, and check that it's still defined, and now is in fact
        # authored at the current edit target.
        ori.Set(UsdGeom.Tokens.leftHanded)
        self.assertTrue(ori.IsDefined())
        self.assertTrue(ori.IsAuthoredAt(ori.GetStage().GetEditTarget()))
        mesh.GetOrientationAttr().Set(UsdGeom.Tokens.rightHanded, 10)
        # "leftHanded" should have been authored at Usd.TimeCode.Default, so reading the
        # attribute at Default should return lh, not rh.
        self.assertEqual(ori.Get(), UsdGeom.Tokens.leftHanded)
        # The value "rightHanded" was set at t=10, so reading *any* time should
        # return "rightHanded"
        self.assertEqual(ori.Get(9.9), UsdGeom.Tokens.rightHanded)
        self.assertEqual(ori.Get(10), UsdGeom.Tokens.rightHanded)
        self.assertEqual(ori.Get(10.1), UsdGeom.Tokens.rightHanded)
        self.assertEqual(ori.Get(11), UsdGeom.Tokens.rightHanded)
        #
        # Attribute name sanity check. We expect the names returned by the schema
        # to match the names returned via the generic API.
        #
        self.assertTrue(len(mesh.GetSchemaAttributeNames()) > 0)
        self.assertNotEqual(mesh.GetSchemaAttributeNames(True), mesh.GetSchemaAttributeNames(False))
        for n in mesh.GetSchemaAttributeNames():
            # apiName overrides
            if n == "primvars:displayColor":
                n = "displayColor"
            elif n == "primvars:displayOpacity":
                n = "displayOpacity"
            # Every schema attribute must have a generated GetXXXAttr accessor.
            name = n[0].upper() + n[1:]
            self.assertTrue(("Get" + name + "Attr") in dir(mesh),
                            ("Get" + name + "Attr() not found in: " + str(dir(mesh))))
def test_IsA(self):
# Author Scene and Compose Stage
l = Sdf.Layer.CreateAnonymous()
stage = Usd.Stage.Open(l.identifier)
# For every prim schema type in this module, validate that:
# 1. We can define a prim of its type
# 2. Its type and inheritance matches our expectations
# 3. At least one of its builtin properties is available and defined
# BasisCurves Tests
schema = UsdGeom.BasisCurves.Define(stage, "/BasisCurves")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # BasisCurves is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # BasisCurves is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # BasisCurves is not a Cylinder
self.assertTrue(schema.GetBasisAttr())
# Camera Tests
schema = UsdGeom.Camera.Define(stage, "/Camera")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Camera is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Camera is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Camera is not a Cylinder
self.assertTrue(schema.GetFocalLengthAttr())
# Capsule Tests
schema = UsdGeom.Capsule.Define(stage, "/Capsule")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Capsule is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Capsule is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Capsule is not a Cylinder
self.assertTrue(schema.GetAxisAttr())
# Cone Tests
schema = UsdGeom.Cone.Define(stage, "/Cone")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Cone is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Cone is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Cone is not a Cylinder
self.assertTrue(schema.GetAxisAttr())
# Cube Tests
schema = UsdGeom.Cube.Define(stage, "/Cube")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Cube is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Cube is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Cube is not a Cylinder
self.assertTrue(schema.GetSizeAttr())
# Cylinder Tests
schema = UsdGeom.Cylinder.Define(stage, "/Cylinder")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Cylinder is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Cylinder is a Xformable
self.assertTrue(prim.IsA(UsdGeom.Cylinder)) # Cylinder is a Cylinder
self.assertTrue(schema.GetAxisAttr())
# Mesh Tests
schema = UsdGeom.Mesh.Define(stage, "/Mesh")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertTrue(prim.IsA(UsdGeom.Mesh)) # Mesh is a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Mesh is a XFormable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Mesh is not a Cylinder
self.assertTrue(schema.GetFaceVertexCountsAttr())
# NurbsCurves Tests
schema = UsdGeom.NurbsCurves.Define(stage, "/NurbsCurves")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # NurbsCurves is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # NurbsCurves is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # NurbsCurves is not a Cylinder
self.assertTrue(schema.GetKnotsAttr())
# NurbsPatch Tests
schema = UsdGeom.NurbsPatch.Define(stage, "/NurbsPatch")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # NurbsPatch is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # NurbsPatch is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # NurbsPatch is not a Cylinder
self.assertTrue(schema.GetUKnotsAttr())
# Points Tests
schema = UsdGeom.Points.Define(stage, "/Points")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Points is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Points is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Points is not a Cylinder
self.assertTrue(schema.GetWidthsAttr())
# Scope Tests
schema = UsdGeom.Scope.Define(stage, "/Scope")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Scope is not a Mesh
self.assertFalse(prim.IsA(UsdGeom.Xformable)) # Scope is not a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Scope is not a Cylinder
# Scope has no builtins!
# Sphere Tests
schema = UsdGeom.Sphere.Define(stage, "/Sphere")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Sphere is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Sphere is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Sphere is not a Cylinder
self.assertTrue(schema.GetRadiusAttr())
# Xform Tests
schema = UsdGeom.Xform.Define(stage, "/Xform")
self.assertTrue(schema)
prim = schema.GetPrim()
self.assertFalse(prim.IsA(UsdGeom.Mesh)) # Xform is not a Mesh
self.assertTrue(prim.IsA(UsdGeom.Xformable)) # Xform is a Xformable
self.assertFalse(prim.IsA(UsdGeom.Cylinder)) # Xform is not a Cylinder
self.assertTrue(schema.GetXformOpOrderAttr())
def test_Fallbacks(self):
    """Exercise fallback-value behavior on schema attributes:
    absence of a fallback (xformOpOrder), authoring/reverting opinions,
    interpolation fallbacks on curves, sparse vs. dense CreateXXXAttr
    authoring, and fallback queries via Usd.AttributeQuery."""
    # Author Scene and Compose Stage
    stage = Usd.Stage.CreateInMemory()
    # Xformable Tests
    identity = Gf.Matrix4d(1)
    origin = Gf.Vec3f(0, 0, 0)
    xform = UsdGeom.Xform.Define(stage, "/Xform")  # direct subclass
    xformOpOrder = xform.GetXformOpOrderAttr()
    self.assertFalse(xformOpOrder.HasAuthoredValue())
    # xformOpOrder has no fallback value
    self.assertEqual(xformOpOrder.Get(), None)
    self.assertFalse(xformOpOrder.HasFallbackValue())
    # Try authoring and reverting...
    xformOpOrderAttr = xform.GetPrim().GetAttribute(UsdGeom.Tokens.xformOpOrder)
    self.assertTrue(xformOpOrderAttr)
    self.assertEqual(xformOpOrderAttr.Get(), None)
    opOrderVal = ["xformOp:transform"]
    self.assertTrue(xformOpOrderAttr.Set(opOrderVal))
    self.assertTrue(xformOpOrderAttr.HasAuthoredValue())
    self.assertNotEqual(xformOpOrderAttr.Get(), None)
    # Clearing the authored opinion takes us back to "no value at all",
    # since there is no fallback to fall through to.
    self.assertTrue(xformOpOrderAttr.Clear())
    self.assertFalse(xformOpOrderAttr.HasAuthoredValue())
    self.assertEqual(xformOpOrderAttr.Get(), None)
    self.assertFalse(xformOpOrder.HasFallbackValue())
    mesh = UsdGeom.Mesh.Define(stage, "/Mesh")  # multiple ancestor hops
    # PointBased and Curves
    curves = UsdGeom.BasisCurves.Define(stage, "/Curves")
    self.assertEqual(curves.GetNormalsInterpolation(), UsdGeom.Tokens.vertex)
    self.assertEqual(curves.GetWidthsInterpolation(), UsdGeom.Tokens.vertex)
    # Before we go, test that CreateXXXAttr performs as we expect in various
    # scenarios
    # Number 1: Sparse and non-sparse authoring on def'd prim
    mesh.CreateDoubleSidedAttr(False, True)
    self.assertFalse(mesh.GetDoubleSidedAttr().HasAuthoredValue())
    mesh.CreateDoubleSidedAttr(False, False)
    self.assertTrue(mesh.GetDoubleSidedAttr().HasAuthoredValue())
    # Number 2: Sparse authoring demotes to dense for non-defed prim
    overMesh = UsdGeom.Mesh(stage.OverridePrim('/overMesh'))
    overMesh.CreateDoubleSidedAttr(False, True)
    self.assertTrue(overMesh.GetDoubleSidedAttr().HasAuthoredValue())
    self.assertEqual(overMesh.GetDoubleSidedAttr().Get(), False)
    overMesh.CreateDoubleSidedAttr(True, True)
    self.assertEqual(overMesh.GetDoubleSidedAttr().Get(), True)
    # make it a defined mesh, and sanity check it still evals the same
    mesh2 = UsdGeom.Mesh.Define(stage, "/overMesh")
    self.assertEqual(overMesh.GetDoubleSidedAttr().Get(), True)
    # Check querying of fallback values.
    sphere = UsdGeom.Sphere.Define(stage, "/Sphere")
    radius = sphere.GetRadiusAttr()
    self.assertTrue(radius.HasFallbackValue())
    radiusQuery = Usd.AttributeQuery(radius)
    self.assertTrue(radiusQuery.HasFallbackValue())
def test_DefineSchema(self):
    """Verify Define() semantics: reuse of existing prims, and that
    defining a different schema at an occupied path rewrites typeName."""
    stage = Usd.Stage.CreateInMemory()
    parentPrim = stage.OverridePrim('/parent')
    self.assertTrue(parentPrim)
    # Create a child scope beneath the parent.
    subscope = UsdGeom.Scope.Define(stage, '/parent/subscope')
    self.assertTrue(subscope)
    # A find-or-create at the same path hands back the very same prim.
    self.assertTrue(stage.OverridePrim('/parent/subscope'))
    self.assertEqual(stage.OverridePrim('/parent/subscope'), subscope.GetPrim())
    # Defining a Mesh at the scope's path converts it: Define() always
    # authors typeName, so the old Scope schema object goes invalid.
    meshSchema = UsdGeom.Mesh.Define(stage, '/parent/subscope')
    self.assertTrue(meshSchema)
    self.assertFalse(subscope)
    # A mesh at a fresh path works as usual.
    meshSchema = UsdGeom.Mesh.Define(stage, '/parent/mesh')
    self.assertTrue(meshSchema)
def test_BasicMetadataCases(self):
    """Verify builtin-attribute metadata (custom/typeName/variability),
    and that the schema definition wins over a conflicting locally
    authored property spec."""
    s = Usd.Stage.CreateInMemory()
    spherePrim = UsdGeom.Sphere.Define(s, '/sphere').GetPrim()
    radius = spherePrim.GetAttribute('radius')
    self.assertTrue(radius.HasMetadata('custom'))
    self.assertTrue(radius.HasMetadata('typeName'))
    self.assertTrue(radius.HasMetadata('variability'))
    self.assertTrue(radius.IsDefined())
    self.assertTrue(not radius.IsCustom())
    self.assertEqual(radius.GetTypeName(), 'double')
    allMetadata = radius.GetAllMetadata()
    self.assertEqual(allMetadata['typeName'], 'double')
    self.assertEqual(allMetadata['variability'], Sdf.VariabilityVarying)
    self.assertEqual(allMetadata['custom'], False)
    # Author a custom property spec.
    # Deliberately authored with custom=True and uniform variability,
    # conflicting with the schema's definition of 'radius'.
    layer = s.GetRootLayer()
    sphereSpec = layer.GetPrimAtPath('/sphere')
    radiusSpec = Sdf.AttributeSpec(
        sphereSpec, 'radius', Sdf.ValueTypeNames.Double,
        variability=Sdf.VariabilityUniform, declaresCustom=True)
    self.assertTrue(radiusSpec.custom)
    self.assertEqual(radiusSpec.variability, Sdf.VariabilityUniform)
    # Definition should win.
    self.assertTrue(not radius.IsCustom())
    self.assertEqual(radius.GetVariability(), Sdf.VariabilityVarying)
    allMetadata = radius.GetAllMetadata()
    self.assertEqual(allMetadata['typeName'], 'double')
    self.assertEqual(allMetadata['variability'], Sdf.VariabilityVarying)
    self.assertEqual(allMetadata['custom'], False)
    # List fields on 'visibility' attribute -- should include 'allowedTokens',
    # provided by the property definition.
    visibility = spherePrim.GetAttribute('visibility')
    self.assertTrue(visibility.IsDefined())
    self.assertTrue('allowedTokens' in visibility.GetAllMetadata())
    # Assert that attribute fallback values are returned for builtin attributes.
    # (displayOpacity is builtin/defined, but has no fallback -> Get() is None.)
    do = spherePrim.GetAttribute('primvars:displayOpacity')
    self.assertTrue(do.IsDefined())
    self.assertTrue(do.Get() is None)
def test_Camera(self):
    """Check UsdGeom.Camera fallback values and round-trip authoring of
    its attributes (projection, apertures, focal length, clipping,
    f-stop, focus distance)."""
    from pxr import Gf
    stage = Usd.Stage.CreateInMemory()
    camera = UsdGeom.Camera.Define(stage, "/Camera")
    self.assertTrue(camera.GetPrim().IsA(UsdGeom.Xformable))  # Camera is Xformable
    self.assertEqual(camera.GetProjectionAttr().Get(), 'perspective')
    camera.GetProjectionAttr().Set('orthographic')
    self.assertEqual(camera.GetProjectionAttr().Get(), 'orthographic')
    # Aperture fallbacks are expressed as inches * 25.4 (i.e. mm).
    self.assertTrue(Gf.IsClose(camera.GetHorizontalApertureAttr().Get(),
                               0.825 * 25.4, 1e-5))
    camera.GetHorizontalApertureAttr().Set(3.0)
    self.assertEqual(camera.GetHorizontalApertureAttr().Get(), 3.0)
    self.assertTrue(Gf.IsClose(camera.GetVerticalApertureAttr().Get(),
                               0.602 * 25.4, 1e-5))
    camera.GetVerticalApertureAttr().Set(2.0)
    self.assertEqual(camera.GetVerticalApertureAttr().Get(), 2.0)
    self.assertEqual(camera.GetFocalLengthAttr().Get(), 50.0)
    camera.GetFocalLengthAttr().Set(35.0)
    self.assertTrue(Gf.IsClose(camera.GetFocalLengthAttr().Get(), 35.0, 1e-5))
    self.assertEqual(camera.GetClippingRangeAttr().Get(), Gf.Vec2f(1, 1000000))
    camera.GetClippingRangeAttr().Set(Gf.Vec2f(5, 10))
    self.assertTrue(Gf.IsClose(camera.GetClippingRangeAttr().Get(),
                               Gf.Vec2f(5, 10), 1e-5))
    # clippingPlanes: defaults to empty, round-trips both a populated
    # array and an explicitly authored empty one.
    self.assertEqual(camera.GetClippingPlanesAttr().Get(), Vt.Vec4fArray())
    cp = Vt.Vec4fArray([(1, 2, 3, 4), (8, 7, 6, 5)])
    camera.GetClippingPlanesAttr().Set(cp)
    self.assertEqual(camera.GetClippingPlanesAttr().Get(), cp)
    cp = Vt.Vec4fArray()
    camera.GetClippingPlanesAttr().Set(cp)
    self.assertEqual(camera.GetClippingPlanesAttr().Get(), cp)
    self.assertEqual(camera.GetFStopAttr().Get(), 0.0)
    camera.GetFStopAttr().Set(2.8)
    self.assertTrue(Gf.IsClose(camera.GetFStopAttr().Get(), 2.8, 1e-5))
    self.assertEqual(camera.GetFocusDistanceAttr().Get(), 0.0)
    camera.GetFocusDistanceAttr().Set(10.0)
    self.assertEqual(camera.GetFocusDistanceAttr().Get(), 10.0)
def test_Points(self):
    """Ids on a Points prim must round-trip, including 64-bit-sized and
    negative values."""
    stage = Usd.Stage.CreateInMemory()
    # Points Tests
    points = UsdGeom.Points.Define(stage, "/Points")
    self.assertTrue(points)
    # Large (>32-bit), zero, and negative ids must all survive
    # authoring and reading back.
    originalIds = [8589934592, 1099511627776, 0, -42]
    points.CreateIdsAttr(originalIds)
    roundTripped = [i for i in points.GetIdsAttr().Get()]
    self.assertEqual(originalIds, roundTripped)
def test_Revert_Bug111239(self):
    """A prim authored with a schema's C++ type name must NOT bind to that
    schema; only the official schema type name ('Sphere') does.

    This used to test a change for Bug111239, but now tests that this
    fix has been reverted: we no longer allow the C++ typename to be
    used as a prim's typename.
    """
    s = Usd.Stage.CreateInMemory()
    sphere = s.DefinePrim('/sphere', typeName='Sphere')
    tfTypeName = UsdGeom.Sphere._GetStaticTfType().typeName
    self.assertEqual(tfTypeName, 'UsdGeomSphere')
    # BUG FIX: pass the tfTypeName *variable* (i.e. 'UsdGeomSphere'),
    # not the literal string 'tfTypeName'.  The original quoted form
    # authored a meaningless typeName, so the negative assertions below
    # never exercised the C++-name case they claim to test.
    usdGeomSphere = s.DefinePrim('/usdGeomSphere', typeName=tfTypeName)
    # The schema-named prim is a valid Sphere with builtins...
    self.assertTrue(UsdGeom.Sphere(sphere))
    self.assertTrue('radius' in [a.GetName() for a in sphere.GetAttributes()])
    # ...while the C++-named prim is not recognized at all.
    self.assertFalse(UsdGeom.Sphere(usdGeomSphere))
    self.assertFalse('radius' in
                     [a.GetName() for a in usdGeomSphere.GetAttributes()])
def test_ComputeExtent(self):
    """Validate extent computation for PointBased, Points (with widths),
    Mesh (falls back to the PointBased extent), and Curves schemas, both
    via the static ComputeExtent helpers and the generic
    UsdGeom.Boundable.ComputeExtentFromPlugins API."""
    from pxr import Gf
    # Create some simple test cases.
    # Each row of allPoints pairs with the same row of allWidths and of
    # the two solution tables below.
    allPoints = [
        [(1, 1, 0)],                # Zero-Volume Extent Test
        [(0, 0, 0)],                # Simple Width Test
        [(-1, -1, -1), (1, 1, 1)],  # Multiple Width Test
        [(-1, -1, -1), (1, 1, 1)],  # Erroneous Widths/Points Test
        # Complex Test, Many Points/Widths
        [(3, -1, 5), (-1.5, 0, 3), (1, 3, -2), (2, 2, -4)],
    ]
    allWidths = [
        [0],            # Zero-Volume Extent Test
        [2],            # Simple Width Test
        [2, 4],         # Multiple Width Test
        [2, 4, 5],      # Erroneous Widths/Points Test (count mismatch)
        [1, 2, 2, 1]    # Complex Test, Many Points/Widths
    ]
    # Expected extents when widths are ignored (PointBased).
    pointBasedSolutions = [
        [(1, 1, 0), (1, 1, 0)],     # Zero-Volume Extent Test
        [(0, 0, 0), (0, 0, 0)],     # Simple Width Test
        [(-1, -1, -1), (1, 1, 1)],  # Multiple Width Test
        # Erroneous Widths/Points Test -> Ok For Point-Based
        [(-1, -1, -1), (1, 1, 1)],
        [(-1.5, -1, -4), (3, 3, 5)]  # Complex Test, Many Points/Widths
    ]
    # Expected extents when each point is padded by half its width (Points).
    pointsSolutions = [
        [(1, 1, 0), (1, 1, 0)],     # Zero-Volume Extent Test
        [(-1, -1, -1), (1, 1, 1)],  # Simple Width Test
        [(-2, -2, -2), (3, 3, 3)],  # Multiple Width Test
        # Erroneous Widths/Points Test -> Returns None
        None,
        [(-2.5, -1.5, -4.5), (3.5, 4, 5.5)]  # Complex Test, Many Points/Widths
    ]
    # Perform the correctness tests for PointBased and Points
    # Test for empty points prims: the computed extent must be the empty range.
    emptyPoints = []
    extremeExtentArr = UsdGeom.PointBased.ComputeExtent(emptyPoints)
    # We need to map the contents of extremeExtentArr to floats from
    # num.float32s due to the way Gf.Vec3f is wrapped out
    # XXX: This is awful, it'd be nice to not do it
    extremeExtentRange = Gf.Range3f(Gf.Vec3f(*map(float, extremeExtentArr[0])),
                                    Gf.Vec3f(*map(float, extremeExtentArr[1])))
    self.assertTrue(extremeExtentRange.IsEmpty())
    # PointBased Test
    numDataSets = len(allPoints)
    for i in range(numDataSets):
        pointsData = allPoints[i]
        expectedExtent = pointBasedSolutions[i]
        actualExtent = UsdGeom.PointBased.ComputeExtent(pointsData)
        for a, b in zip(expectedExtent, actualExtent):
            self.assertTrue(Gf.IsClose(a, b, 1e-5))
    # Points Test
    for i in range(numDataSets):
        pointsData = allPoints[i]
        widthsData = allWidths[i]
        expectedExtent = pointsSolutions[i]
        actualExtent = UsdGeom.Points.ComputeExtent(pointsData, widthsData)
        # The mismatched widths/points row yields None from both paths,
        # so only compare when both computed.
        if actualExtent is not None and expectedExtent is not None:
            for a, b in zip(expectedExtent, actualExtent):
                self.assertTrue(Gf.IsClose(a, b, 1e-5))
        # Compute extent via generic UsdGeom.Boundable API
        s = Usd.Stage.CreateInMemory()
        pointsPrim = UsdGeom.Points.Define(s, "/Points")
        pointsPrim.CreatePointsAttr(pointsData)
        pointsPrim.CreateWidthsAttr(widthsData)
        actualExtent = UsdGeom.Boundable.ComputeExtentFromPlugins(
            pointsPrim, Usd.TimeCode.Default())
        if actualExtent is not None and expectedExtent is not None:
            for a, b in zip(expectedExtent, list(actualExtent)):
                self.assertTrue(Gf.IsClose(a, b, 1e-5))
    # Mesh Test
    for i in range(numDataSets):
        pointsData = allPoints[i]
        expectedExtent = pointBasedSolutions[i]
        # Compute extent via generic UsdGeom.Boundable API.
        # UsdGeom.Mesh does not have its own compute extent function, so
        # it should fall back to the extent for PointBased prims.
        s = Usd.Stage.CreateInMemory()
        meshPrim = UsdGeom.Mesh.Define(s, "/Mesh")
        meshPrim.CreatePointsAttr(pointsData)
        actualExtent = UsdGeom.Boundable.ComputeExtentFromPlugins(
            meshPrim, Usd.TimeCode.Default())
        for a, b in zip(expectedExtent, actualExtent):
            self.assertTrue(Gf.IsClose(a, b, 1e-5))
    # Test UsdGeomCurves
    curvesPoints = [
        [(0,0,0), (1,1,1), (2,1,1), (3,0,0)],  # Test Curve with 1 width
        [(0,0,0), (1,1,1), (2,1,1), (3,0,0)],  # Test Curve with 2 widths
        [(0,0,0), (1,1,1), (2,1,1), (3,0,0)]   # Test Curve with no width
    ]
    curvesWidths = [
        [1],        # Test Curve with 1 width
        [.5, .1],   # Test Curve with 2 widths
        []          # Test Curve with no width
    ]
    curvesSolutions = [
        [(-.5,-.5,-.5), (3.5,1.5,1.5)],        # Test Curve with 1 width
        [(-.25,-.25,-.25), (3.25,1.25,1.25)],  # Test Curve with 2 widths (MAX)
        [(0,0,0), (3,1,1)],                    # Test Curve with no width
    ]
    # Perform the actual v. expected comparison
    numDataSets = len(curvesPoints)
    for i in range(numDataSets):
        pointsData = curvesPoints[i]
        widths = curvesWidths[i]
        expectedExtent = curvesSolutions[i]
        actualExtent = UsdGeom.Curves.ComputeExtent(pointsData, widths)
        for a, b in zip(expectedExtent, actualExtent):
            self.assertTrue(Gf.IsClose(a, b, 1e-5))
        # Compute extent via generic UsdGeom.Boundable API, for both
        # concrete curve schemas.
        s = Usd.Stage.CreateInMemory()
        nurbsCurvesPrim = UsdGeom.NurbsCurves.Define(s, "/NurbsCurves")
        nurbsCurvesPrim.CreatePointsAttr(pointsData)
        nurbsCurvesPrim.CreateWidthsAttr(widths)
        actualExtent = UsdGeom.Boundable.ComputeExtentFromPlugins(
            nurbsCurvesPrim, Usd.TimeCode.Default())
        for a, b in zip(expectedExtent, actualExtent):
            self.assertTrue(Gf.IsClose(a, b, 1e-5))
        basisCurvesPrim = UsdGeom.BasisCurves.Define(s, "/BasisCurves")
        basisCurvesPrim.CreatePointsAttr(pointsData)
        basisCurvesPrim.CreateWidthsAttr(widths)
        actualExtent = UsdGeom.Boundable.ComputeExtentFromPlugins(
            basisCurvesPrim, Usd.TimeCode.Default())
        for a, b in zip(expectedExtent, actualExtent):
            self.assertTrue(Gf.IsClose(a, b, 1e-5))
def test_TypeUsage(self):
    """ComputeExtent must accept a plain python list of tuples as well as
    a Vt.Vec3fArray, and produce the same extent for both."""
    rawPoints = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
    vtPoints = Vt.Vec3fArray(rawPoints)
    computeExtent = UsdGeom.PointBased.ComputeExtent
    fromVtArray = computeExtent(vtPoints)
    fromList = computeExtent(rawPoints)
    for expected, actual in zip(fromVtArray, fromList):
        self.assertTrue(Gf.IsClose(expected, actual, 1e-5))
def test_Bug116593(self):
    """SetExtentsHint must accept lists of plain tuples as well as lists
    of Gf.Vec3f values (regression test for bug 116593)."""
    from pxr import Gf
    stage = Usd.Stage.CreateInMemory()
    prim = stage.DefinePrim('/sphere', typeName='Sphere')
    model = UsdGeom.ModelAPI(prim)
    # Author the hint from a list of plain tuples.
    self.assertTrue(model.SetExtentsHint([(1, 2, 2), (12, 3, 3)]))
    hint = model.GetExtentsHint()
    self.assertEqual(hint[0], Gf.Vec3f(1, 2, 2))
    self.assertEqual(hint[1], Gf.Vec3f(12, 3, 3))
    # Author the hint from Gf vectors.
    self.assertTrue(model.SetExtentsHint([Gf.Vec3f(1, 2, 2), Gf.Vec3f(1, 1, 1)]))
    hint = model.GetExtentsHint()
    self.assertEqual(hint[0], Gf.Vec3f(1, 2, 2))
    self.assertEqual(hint[1], Gf.Vec3f(1, 1, 1))
def test_Typed(self):
    """IsTyped holds for concrete and abstract typed schemas, but not for
    API schemas."""
    from pxr import Tf
    expectations = [
        ("UsdGeomXform", True),      # concrete typed schema
        ("UsdGeomImageable", True),  # abstract typed schema
        ("UsdGeomModelAPI", False),  # API schema: not typed
    ]
    for typeName, isTyped in expectations:
        tfType = Tf.Type.FindByName(typeName)
        self.assertEqual(Usd.SchemaRegistry.IsTyped(tfType), isTyped)
def test_Concrete(self):
    """Only instantiable schemas are concrete; abstract bases and API
    schemas are not."""
    from pxr import Tf
    expectations = [
        ("UsdGeomXform", True),       # instantiable
        ("UsdGeomImageable", False),  # abstract base
        ("UsdGeomModelAPI", False),   # API schema
    ]
    for typeName, isConcrete in expectations:
        tfType = Tf.Type.FindByName(typeName)
        self.assertEqual(Usd.SchemaRegistry().IsConcrete(tfType), isConcrete)
def test_Apply(self):
    """Applied API schemas are recorded once each, in application order,
    and applying to the null prim raises instead of crashing."""
    stage = Usd.Stage.CreateInMemory('AppliedSchemas.usd')
    prim = stage.DefinePrim('/hello')
    self.assertEqual([], prim.GetAppliedSchemas())
    # Applying the same API twice must not create a duplicate entry.
    UsdGeom.MotionAPI.Apply(prim)
    self.assertEqual(['MotionAPI'], prim.GetAppliedSchemas())
    UsdGeom.MotionAPI.Apply(prim)
    self.assertEqual(['MotionAPI'], prim.GetAppliedSchemas())
    # A second, distinct API is appended after the first.
    UsdGeom.ModelAPI.Apply(prim)
    self.assertEqual(['MotionAPI', 'GeomModelAPI'], prim.GetAppliedSchemas())
    # Applying to the null prim must raise (not crash) and not succeed.
    for apiSchema in (UsdGeom.MotionAPI, UsdGeom.ModelAPI):
        with self.assertRaises(Tf.ErrorException):
            self.assertFalse(apiSchema.Apply(Usd.Prim()))
def test_IsATypeless(self):
    """IsA answers True for every ancestor type of a typed prim, and
    False for every one of those types on an untyped prim."""
    from pxr import Usd, Tf
    stage = Usd.Stage.CreateInMemory()
    typedPrim = stage.DefinePrim('/sphere', typeName='Sphere')
    untypedPrim = stage.DefinePrim('/regular')
    # Sphere's full inheritance chain, most-derived first.
    ancestry = ['UsdGeomSphere', 'UsdGeomGprim', 'UsdGeomBoundable',
                'UsdGeomXformable', 'UsdGeomImageable', 'UsdTyped']
    for typeName in ancestry:
        tfType = Tf.Type.FindByName(typeName)
        self.assertTrue(typedPrim.IsA(tfType))
        self.assertFalse(untypedPrim.IsA(tfType))
def test_HasAPI(self):
    """HasAPI tracks applied API schemas, and raises for unknown or
    non-applied-API types rather than returning False."""
    from pxr import Usd, Tf
    stage = Usd.Stage.CreateInMemory()
    prim = stage.DefinePrim('/prim')
    appliedApiTypes = [Tf.Type.FindByName('UsdGeomMotionAPI'),
                       Tf.Type.FindByName('UsdGeomModelAPI')]
    # Nothing has been applied yet.
    for apiType in appliedApiTypes:
        self.assertFalse(prim.HasAPI(apiType))
    # Apply both schemas.
    UsdGeom.ModelAPI.Apply(prim)
    UsdGeom.MotionAPI.Apply(prim)
    # Both must now be reported.
    for apiType in appliedApiTypes:
        self.assertTrue(prim.HasAPI(apiType))
    # Unknown types, concrete/abstract typed schemas, and non-applied API
    # schemas are all errors.
    invalidTypes = [Tf.Type.Unknown,
                    Tf.Type.FindByName('UsdGeomXform'),
                    Tf.Type.FindByName('UsdGeomImageable'),
                    Tf.Type.FindByName('UsdModelAPI')]  # non-applied API schema
    for invalidType in invalidTypes:
        with self.assertRaises(Tf.ErrorException):
            prim.HasAPI(invalidType)
# Run the full suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
| 42.69326
| 100
| 0.63055
| 3,425
| 31,038
| 5.706277
| 0.16292
| 0.070917
| 0.027937
| 0.028142
| 0.456048
| 0.358831
| 0.312679
| 0.27763
| 0.258033
| 0.209681
| 0
| 0.018041
| 0.257104
| 31,038
| 726
| 101
| 42.752066
| 0.82956
| 0.204266
| 0
| 0.384449
| 0
| 0
| 0.04174
| 0.002734
| 0
| 0
| 0
| 0
| 0.414687
| 1
| 0.034557
| false
| 0
| 0.019438
| 0
| 0.056156
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1694a3aec6658351c14a81b2e91e92955b6cb8a7
| 341
|
py
|
Python
|
lucky_guess/__init__.py
|
mfinzi/lucky-guess-chemist
|
01898b733dc7d026f70d0cb6337309cb600502fb
|
[
"MIT"
] | null | null | null |
lucky_guess/__init__.py
|
mfinzi/lucky-guess-chemist
|
01898b733dc7d026f70d0cb6337309cb600502fb
|
[
"MIT"
] | null | null | null |
lucky_guess/__init__.py
|
mfinzi/lucky-guess-chemist
|
01898b733dc7d026f70d0cb6337309cb600502fb
|
[
"MIT"
] | null | null | null |
# Package __init__: auto-import every submodule found in this package's
# directory and re-export, at package level, the names each submodule
# declares in its __all__.
import importlib
import pkgutil
__all__ = []
for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
    # Relative import so nested packages resolve against this package.
    module = importlib.import_module('.'+module_name, package=__name__)
    try:
        # Hoist the submodule's public names into this namespace.
        globals().update({k: getattr(module, k) for k in module.__all__})
        __all__ += module.__all__
    except AttributeError:
        # Submodule has no __all__ -- nothing to re-export.
        continue
| 34.1
| 73
| 0.71261
| 42
| 341
| 5.095238
| 0.547619
| 0.140187
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 341
| 10
| 74
| 34.1
| 0.767025
| 0
| 0
| 0
| 0
| 0
| 0.002924
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
16a335de057546c0e95c5699aa9470bc30a7f928
| 334
|
py
|
Python
|
src/djangoreactredux/wsgi.py
|
noscripter/django-react-redux-jwt-base
|
078fb86005db106365df51fa11d8602fa432e3c3
|
[
"MIT"
] | 4
|
2016-07-03T08:18:45.000Z
|
2018-12-25T07:47:41.000Z
|
src/djangoreactredux/wsgi.py
|
noscripter/django-react-redux-jwt-base
|
078fb86005db106365df51fa11d8602fa432e3c3
|
[
"MIT"
] | 2
|
2021-03-20T00:02:08.000Z
|
2021-06-10T23:34:26.000Z
|
src/djangoreactredux/wsgi.py
|
noscripter/django-react-redux-jwt-base
|
078fb86005db106365df51fa11d8602fa432e3c3
|
[
"MIT"
] | 1
|
2019-08-02T14:51:41.000Z
|
2019-08-02T14:51:41.000Z
|
"""
WSGI config for django-react-redux-jwt-base project.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangoreactredux.settings.dev")
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
| 23.857143
| 80
| 0.820359
| 40
| 334
| 6.7
| 0.575
| 0.052239
| 0.134328
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086826
| 334
| 13
| 81
| 25.692308
| 0.878689
| 0.155689
| 0
| 0
| 0
| 0
| 0.186813
| 0.186813
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
16a89cacbc82dd93659b9a841883e22a139d8576
| 447
|
py
|
Python
|
main.py
|
1999foxes/run-cmd-from-websocket
|
0e2a080fe92b93c6cba63dfe5649ac2a3e745009
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
1999foxes/run-cmd-from-websocket
|
0e2a080fe92b93c6cba63dfe5649ac2a3e745009
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
1999foxes/run-cmd-from-websocket
|
0e2a080fe92b93c6cba63dfe5649ac2a3e745009
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import json
import logging
import websockets
logging.basicConfig()
# Currently-connected client sockets.  Registered on connect so the
# cleanup in the handler's finally-clause is valid.
USERS = set()

async def counter(websocket, path):
    """Print every message received from one client until it disconnects.

    BUG FIX: the original handler's ``finally: USERS.remove(websocket)``
    referenced a global ``USERS`` that was never defined (and the socket
    was never added to it), so every disconnect raised NameError.  The
    socket is now registered on connect and removed with ``discard`` (a
    no-op if absent) on disconnect.
    """
    USERS.add(websocket)
    try:
        print("connect")
        async for message in websocket:
            print(message)
    finally:
        USERS.discard(websocket)
async def main():
    """Serve the counter handler on localhost:5000 until cancelled."""
    async with websockets.serve(counter, "localhost", 5000):
        # An awaited Future that is never resolved keeps the server alive.
        await asyncio.Future()  # run forever
if __name__ == "__main__":
asyncio.run(main())
| 17.88
| 60
| 0.657718
| 50
| 447
| 5.72
| 0.62
| 0.055944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011834
| 0.243848
| 447
| 24
| 61
| 18.625
| 0.83432
| 0.024609
| 0
| 0
| 0
| 0
| 0.0553
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.235294
| 0
| 0.235294
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16a8a652721deb01765dac84306cf8e790d8b09a
| 3,998
|
py
|
Python
|
3d_Vnet/3dvnet.py
|
GingerSpacetail/Brain-Tumor-Segmentation-and-Survival-Prediction-using-Deep-Neural-Networks
|
f627ce48e44bcc7d295ee1cf4086bfdfd7705d44
|
[
"MIT"
] | 100
|
2020-05-21T10:23:31.000Z
|
2022-03-26T18:26:38.000Z
|
3d_Vnet/3dvnet.py
|
GingerSpacetail/Brain-Tumor-Segmentation-and-Survival-Prediction-using-Deep-Neural-Networks
|
f627ce48e44bcc7d295ee1cf4086bfdfd7705d44
|
[
"MIT"
] | 3
|
2020-08-19T18:14:01.000Z
|
2021-01-04T09:53:07.000Z
|
3d_Vnet/3dvnet.py
|
GingerSpacetail/Brain-Tumor-Segmentation-and-Survival-Prediction-using-Deep-Neural-Networks
|
f627ce48e44bcc7d295ee1cf4086bfdfd7705d44
|
[
"MIT"
] | 25
|
2020-09-05T04:19:22.000Z
|
2022-02-09T19:30:29.000Z
|
import random
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#%matplotlib inline
import tensorflow as tf
import keras.backend as K
from keras.utils import to_categorical
from keras import metrics
from keras.models import Model, load_model
from keras.layers import Input, BatchNormalization, Activation, Dense, Dropout,Maximum
from keras.layers.core import Lambda, RepeatVector, Reshape
from keras.layers.convolutional import Conv2D, Conv2DTranspose,Conv3D,Conv3DTranspose
from keras.layers.pooling import MaxPooling2D, GlobalMaxPool2D,MaxPooling3D
from keras.layers.merge import concatenate, add
from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
from skimage.io import imread, imshow, concatenate_images
from skimage.transform import resize
from sklearn.utils import class_weight
from keras.callbacks import ModelCheckpoint
from keras.callbacks import CSVLogger
from keras.callbacks import EarlyStopping
from keras.layers.advanced_activations import PReLU
import os
from skimage.io import imread, imshow, concatenate_images
from skimage.transform import resize
# from medpy.io import load
import numpy as np
#import cv2
import nibabel as nib
from PIL import Image
def conv_block(input_mat, num_filters, kernel_size, batch_norm):
    """Residual block: two identical 3-D conv stages, then add the input.

    Each stage is Conv3D (cubic kernel, unit stride, 'same' padding),
    optionally BatchNormalization, then ReLU.  The block output is the
    element-wise sum of the input tensor and the second stage's output.
    """
    cubic_kernel = (kernel_size, kernel_size, kernel_size)
    x = input_mat
    for _ in range(2):
        x = Conv3D(num_filters, kernel_size=cubic_kernel,
                   strides=(1, 1, 1), padding='same')(x)
        if batch_norm:
            x = BatchNormalization()(x)
        x = Activation('relu')(x)
    return add([input_mat, x])
def Vnet_3d(input_img, n_filters = 8, dropout = 0.2, batch_norm = True):
    """Build a 3-D V-Net style encoder/decoder segmentation model.

    Downsampling is done with strided (2,2,2) Conv3D layers, upsampling
    with Conv3DTranspose; skip connections concatenate encoder features
    into the decoder, and the head is a 4-channel softmax Conv3D.

    Args:
        input_img: Keras input tensor (5-D; presumably (batch, D, H, W, C)
            -- confirm against the caller).
        n_filters: base filter count, doubled at each downsampling level.
        dropout: dropout rate applied after each down/up stage.
        batch_norm: accepted for API symmetry; the residual blocks below
            are invoked with batch-norm hard-coded to True.

    Returns:
        An uncompiled keras Model mapping input_img to the softmax output.
    """
    # --- Encoder ---
    #c1 = conv_block(input_img,n_filters,3,batch_norm)
    c1 = Conv3D(n_filters,kernel_size = (5,5,5) , strides = (1,1,1) , padding='same')(input_img)
    #c1 = add([c1,input_img])
    # Strided conv halves each spatial dimension (replaces pooling).
    c2 = Conv3D(n_filters*2,kernel_size = (2,2,2) , strides = (2,2,2) , padding = 'same' )(c1)
    c3 = conv_block(c2 , n_filters*2,5,True)
    p3 = Conv3D(n_filters*4,kernel_size = (2,2,2) , strides = (2,2,2), padding = 'same')(c3)
    p3 = Dropout(dropout)(p3)
    c4 = conv_block(p3, n_filters*4,5,True)
    p4 = Conv3D(n_filters*8,kernel_size = (2,2,2) , strides = (2,2,2) , padding='same')(c4)
    p4 = Dropout(dropout)(p4)
    c5 = conv_block(p4, n_filters*8,5,True)
    p6 = Conv3D(n_filters*16,kernel_size = (2,2,2) , strides = (2,2,2) , padding='same')(c5)
    p6 = Dropout(dropout)(p6)
    #c6 = conv_block(p5, n_filters*8,5,True)
    #p6 = Conv3D(n_filters*16,kernel_size = (2,2,2) , strides = (2,2,2) , padding='same')(c6)
    # Bottleneck residual block.
    p7 = conv_block(p6,n_filters*16,5,True)
    # --- Decoder: transpose-conv upsampling + encoder skip connections ---
    u6 = Conv3DTranspose(n_filters*8, (2,2,2), strides=(2, 2, 2), padding='same')(p7);
    u6 = concatenate([u6,c5]);
    c7 = conv_block(u6,n_filters*16,5,True)
    c7 = Dropout(dropout)(c7)
    u7 = Conv3DTranspose(n_filters*4,(2,2,2),strides = (2,2,2) , padding= 'same')(c7);
    u8 = concatenate([u7,c4]);
    c8 = conv_block(u8,n_filters*8,5,True)
    c8 = Dropout(dropout)(c8)
    u9 = Conv3DTranspose(n_filters*2,(2,2,2),strides = (2,2,2) , padding= 'same')(c8);
    u9 = concatenate([u9,c3]);
    c9 = conv_block(u9,n_filters*4,5,True)
    c9 = Dropout(dropout)(c9)
    u10 = Conv3DTranspose(n_filters,(2,2,2),strides = (2,2,2) , padding= 'same')(c9);
    u10 = concatenate([u10,c1]);
    # Final residual stage at full resolution.
    c10 = Conv3D(n_filters*2,kernel_size = (5,5,5),strides = (1,1,1) , padding = 'same')(u10);
    c10 = Dropout(dropout)(c10)
    c10 = add([c10,u10]);
    #c9 = conv_block(u9,n_filters,3,batch_norm)
    # 4-class voxel-wise softmax head.
    outputs = Conv3D(4, (1,1,1), activation='softmax')(c10)
    model = Model(inputs=input_img, outputs=outputs)
    return model
| 34.465517
| 118
| 0.693847
| 617
| 3,998
| 4.36953
| 0.210697
| 0.027448
| 0.021142
| 0.033383
| 0.428042
| 0.35089
| 0.306751
| 0.303042
| 0.303042
| 0.267433
| 0
| 0.064865
| 0.167084
| 3,998
| 115
| 119
| 34.765217
| 0.744745
| 0.074037
| 0
| 0.162162
| 0
| 0
| 0.017603
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.405405
| 0
| 0.459459
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
16a9cd5f8c3947e5f770014cb07528f411173928
| 18,818
|
py
|
Python
|
lib/networks/Resnet50_train.py
|
yangxue0827/TF_Deformable_Net
|
00c86380fd2725ebe7ae22f41d460ffc0bca378d
|
[
"MIT"
] | 193
|
2017-07-19T14:29:38.000Z
|
2021-10-20T07:35:42.000Z
|
lib/networks/Resnet50_train.py
|
yangxue0827/TF_Deformable_Net
|
00c86380fd2725ebe7ae22f41d460ffc0bca378d
|
[
"MIT"
] | 29
|
2017-07-24T10:07:22.000Z
|
2020-01-03T20:38:36.000Z
|
lib/networks/Resnet50_train.py
|
Zardinality/TF_Deformable_Net
|
00c86380fd2725ebe7ae22f41d460ffc0bca378d
|
[
"MIT"
] | 67
|
2017-07-27T14:32:47.000Z
|
2021-12-27T13:10:37.000Z
|
# --------------------------------------------------------
# TFFRCNN - Resnet50
# Copyright (c) 2016
# Licensed under The MIT License [see LICENSE for details]
# Written by miraclebiu
# --------------------------------------------------------
import tensorflow as tf
from .network import Network
from ..fast_rcnn.config import cfg
class Resnet50_train(Network):
def __init__(self, trainable=True):
    """Create the TF1 input placeholders and layer map, then build the net.

    Args:
        trainable: stored on the instance; presumably consulted by the
            base Network class when creating variables -- confirm there.
    """
    self.inputs = []
    # Image batch, NHWC with dynamic batch/height/width and 3 channels.
    self.data = tf.placeholder(tf.float32, shape=[None, None, None, 3], name='data')
    # One row of 3 floats per image -- presumably (height, width, scale);
    # TODO confirm against the data layer.
    self.im_info = tf.placeholder(tf.float32, shape=[None, 3], name='im_info')
    # Ground-truth boxes, 5 values per row -- presumably (x1, y1, x2, y2,
    # class); TODO confirm.
    self.gt_boxes = tf.placeholder(tf.float32, shape=[None, 5], name='gt_boxes')
    self.gt_ishard = tf.placeholder(tf.int32, shape=[None], name='gt_ishard')
    self.dontcare_areas = tf.placeholder(tf.float32, shape=[None, 4], name='dontcare_areas')
    self.keep_prob = tf.placeholder(tf.float32)
    # Name -> tensor map consumed by the feed()/layer chaining in setup().
    self.layers = dict({'data':self.data, 'im_info':self.im_info, 'gt_boxes':self.gt_boxes,\
                        'gt_ishard': self.gt_ishard, 'dontcare_areas': self.dontcare_areas})
    self.trainable = trainable
    # Assemble the network graph immediately on construction.
    self.setup()
def setup(self):
n_classes = cfg.NCLASSES
# anchor_scales = [8, 16, 32]
anchor_scales = cfg.ANCHOR_SCALES
_feat_stride = [16, ]
(self.feed('data')
.conv(7, 7, 64, 2, 2, relu=False, name='conv1')
.batch_normalization(relu=True, name='bn_conv1', is_training=False)
.max_pool(3, 3, 2, 2, padding='VALID',name='pool1')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2a_branch1')
.batch_normalization(name='bn2a_branch1',is_training=False,relu=False))
(self.feed('pool1')
.conv(1, 1, 64, 1, 1, biased=False, relu=False, name='res2a_branch2a')
.batch_normalization(relu=True, name='bn2a_branch2a',is_training=False)
.conv(3, 3, 64, 1, 1, biased=False, relu=False, name='res2a_branch2b')
.batch_normalization(relu=True, name='bn2a_branch2b',is_training=False)
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2a_branch2c')
.batch_normalization(name='bn2a_branch2c',is_training=False,relu=False))
(self.feed('bn2a_branch1',
'bn2a_branch2c')
.add(name='res2a')
.relu(name='res2a_relu')
.conv(1, 1, 64, 1, 1, biased=False, relu=False, name='res2b_branch2a')
.batch_normalization(relu=True, name='bn2b_branch2a',is_training=False)
.conv(3, 3, 64, 1, 1, biased=False, relu=False, name='res2b_branch2b')
.batch_normalization(relu=True, name='bn2b_branch2b',is_training=False)
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2b_branch2c')
.batch_normalization(name='bn2b_branch2c',is_training=False,relu=False))
(self.feed('res2a_relu',
'bn2b_branch2c')
.add(name='res2b')
.relu(name='res2b_relu')
.conv(1, 1, 64, 1, 1, biased=False, relu=False, name='res2c_branch2a')
.batch_normalization(relu=True, name='bn2c_branch2a',is_training=False)
.conv(3, 3, 64, 1, 1, biased=False, relu=False, name='res2c_branch2b')
.batch_normalization(relu=True, name='bn2c_branch2b',is_training=False)
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res2c_branch2c')
.batch_normalization(name='bn2c_branch2c',is_training=False,relu=False))
(self.feed('res2b_relu',
'bn2c_branch2c')
.add(name='res2c')
.relu(name='res2c_relu')
.conv(1, 1, 512, 2, 2, biased=False, relu=False, name='res3a_branch1', padding='VALID')
.batch_normalization(name='bn3a_branch1',is_training=False,relu=False))
(self.feed('res2c_relu')
.conv(1, 1, 128, 2, 2, biased=False, relu=False, name='res3a_branch2a', padding='VALID')
.batch_normalization(relu=True, name='bn3a_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3a_branch2b')
.batch_normalization(relu=True, name='bn3a_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3a_branch2c')
.batch_normalization(name='bn3a_branch2c',is_training=False,relu=False))
(self.feed('bn3a_branch1',
'bn3a_branch2c')
.add(name='res3a')
.relu(name='res3a_relu')
.conv(1, 1, 128, 1, 1, biased=False, relu=False, name='res3b_branch2a')
.batch_normalization(relu=True, name='bn3b_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3b_branch2b')
.batch_normalization(relu=True, name='bn3b_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3b_branch2c')
.batch_normalization(name='bn3b_branch2c',is_training=False,relu=False))
(self.feed('res3a_relu',
'bn3b_branch2c')
.add(name='res3b')
.relu(name='res3b_relu')
.conv(1, 1, 128, 1, 1, biased=False, relu=False, name='res3c_branch2a')
.batch_normalization(relu=True, name='bn3c_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3c_branch2b')
.batch_normalization(relu=True, name='bn3c_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3c_branch2c')
.batch_normalization(name='bn3c_branch2c',is_training=False,relu=False))
(self.feed('res3b_relu',
'bn3c_branch2c')
.add(name='res3c')
.relu(name='res3c_relu')
.conv(1, 1, 128, 1, 1, biased=False, relu=False, name='res3d_branch2a')
.batch_normalization(relu=True, name='bn3d_branch2a',is_training=False)
.conv(3, 3, 128, 1, 1, biased=False, relu=False, name='res3d_branch2b')
.batch_normalization(relu=True, name='bn3d_branch2b',is_training=False)
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res3d_branch2c')
.batch_normalization(name='bn3d_branch2c',is_training=False,relu=False))
(self.feed('res3c_relu',
'bn3d_branch2c')
.add(name='res3d')
.relu(name='res3d_relu')
.conv(1, 1, 1024, 2, 2, biased=False, relu=False, name='res4a_branch1', padding='VALID')
.batch_normalization(name='bn4a_branch1',is_training=False,relu=False))
(self.feed('res3d_relu')
.conv(1, 1, 256, 2, 2, biased=False, relu=False, name='res4a_branch2a', padding='VALID')
.batch_normalization(relu=True, name='bn4a_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4a_branch2b')
.batch_normalization(relu=True, name='bn4a_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4a_branch2c')
.batch_normalization(name='bn4a_branch2c',is_training=False,relu=False))
(self.feed('bn4a_branch1',
'bn4a_branch2c')
.add(name='res4a')
.relu(name='res4a_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4b_branch2a')
.batch_normalization(relu=True, name='bn4b_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4b_branch2b')
.batch_normalization(relu=True, name='bn4b_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4b_branch2c')
.batch_normalization(name='bn4b_branch2c',is_training=False,relu=False))
(self.feed('res4a_relu',
'bn4b_branch2c')
.add(name='res4b')
.relu(name='res4b_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4c_branch2a')
.batch_normalization(relu=True, name='bn4c_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4c_branch2b')
.batch_normalization(relu=True, name='bn4c_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4c_branch2c')
.batch_normalization(name='bn4c_branch2c',is_training=False,relu=False))
(self.feed('res4b_relu',
'bn4c_branch2c')
.add(name='res4c')
.relu(name='res4c_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4d_branch2a')
.batch_normalization(relu=True, name='bn4d_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4d_branch2b')
.batch_normalization(relu=True, name='bn4d_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4d_branch2c')
.batch_normalization(name='bn4d_branch2c',is_training=False,relu=False))
(self.feed('res4c_relu',
'bn4d_branch2c')
.add(name='res4d')
.relu(name='res4d_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4e_branch2a')
.batch_normalization(relu=True, name='bn4e_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4e_branch2b')
.batch_normalization(relu=True, name='bn4e_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4e_branch2c')
.batch_normalization(name='bn4e_branch2c',is_training=False,relu=False))
(self.feed('res4d_relu',
'bn4e_branch2c')
.add(name='res4e')
.relu(name='res4e_relu')
.conv(1, 1, 256, 1, 1, biased=False, relu=False, name='res4f_branch2a')
.batch_normalization(relu=True, name='bn4f_branch2a',is_training=False)
.conv(3, 3, 256, 1, 1, biased=False, relu=False, name='res4f_branch2b')
.batch_normalization(relu=True, name='bn4f_branch2b',is_training=False)
.conv(1, 1, 1024, 1, 1, biased=False, relu=False, name='res4f_branch2c')
.batch_normalization(name='bn4f_branch2c',is_training=False,relu=False))
(self.feed('res4e_relu',
'bn4f_branch2c')
.add(name='res4f')
.relu(name='res4f_relu'))
#========= RPN ============
(self.feed('res4f_relu')
.conv(3,3,512,1,1,name='rpn_conv/3x3')
.conv(1,1,len(anchor_scales)*3*2 ,1 , 1, padding='VALID', relu = False, name='rpn_cls_score'))
(self.feed('rpn_cls_score', 'gt_boxes', 'gt_ishard', 'dontcare_areas', 'im_info')
.anchor_target_layer(_feat_stride, anchor_scales, name = 'rpn-data' ))
# Loss of rpn_cls & rpn_boxes
(self.feed('rpn_conv/3x3')
.conv(1,1,len(anchor_scales)*3*4, 1, 1, padding='VALID', relu = False, name='rpn_bbox_pred'))
#========= RoI Proposal ============
(self.feed('rpn_cls_score')
.spatial_reshape_layer(2, name = 'rpn_cls_score_reshape')
.spatial_softmax(name='rpn_cls_prob'))
(self.feed('rpn_cls_prob')
.spatial_reshape_layer(len(anchor_scales)*3*2, name = 'rpn_cls_prob_reshape'))
(self.feed('rpn_cls_prob_reshape','rpn_bbox_pred','im_info')
.proposal_layer(_feat_stride, anchor_scales, 'TRAIN',name = 'rpn_rois'))
(self.feed('rpn_rois','gt_boxes', 'gt_ishard', 'dontcare_areas')
.proposal_target_layer(n_classes,name = 'roi-data'))
#========= RCNN ============
(self.feed('res4f_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5a_branch1', padding='VALID')
.batch_normalization(relu=False, name='bn5a_branch1'))
(self.feed('res4f_relu')
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5a_branch2a', padding='VALID')
.batch_normalization(relu=False, name='bn5a_branch2a')
.relu(name='res5a_branch2a_relu')
.conv(3, 3, 72, 1, 1, biased=True, rate=2, relu=False, name='res5a_branch2b_offset', padding='SAME', initializer='zeros'))
(self.feed('res5a_branch2a_relu', 'res5a_branch2b_offset')
.deform_conv(3, 3, 512, 1, 1, biased=False, rate=2, relu=False, num_deform_group=4, name='res5a_branch2b')
.batch_normalization(relu=False, name='bn5a_branch2b')
.relu(name='res5a_branch2b_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5a_branch2c', padding='VALID')
.batch_normalization(relu=False, name='bn5a_branch2c'))
(self.feed('bn5a_branch1', 'bn5a_branch2c')
.add(name='res5a')
.relu(name='res5a_relu')
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5b_branch2a', padding='VALID')
.batch_normalization(relu=False, name='bn5b_branch2a')
.relu(name='res5b_branch2a_relu')
.conv(3, 3, 72, 1, 1, biased=True, rate=2, relu=False, name='res5b_branch2b_offset', padding='SAME', initializer='zeros'))
(self.feed('res5b_branch2a_relu', 'res5b_branch2b_offset')
.deform_conv(3, 3, 512, 1, 1, biased=False, rate=2, relu=False, num_deform_group=4, name='res5b_branch2b')
.batch_normalization(relu=False, name='bn5b_branch2b')
.relu(name='res5b_branch2b_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5b_branch2c', padding='VALID')
.batch_normalization(relu=False, name='bn5b_branch2c'))
(self.feed('res5a_relu', 'bn5b_branch2c')
.add(name='res5b')
.relu(name='res5b_relu')
.conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5c_branch2a', padding='VALID')
.batch_normalization(relu=False, name='bn5c_branch2a')
.relu(name='res5c_branch2a_relu')
.conv(3, 3, 72, 1, 1, biased=True, rate=2, relu=False, name='res5c_branch2b_offset', padding='SAME', initializer='zeros') )
(self.feed('res5c_branch2a_relu', 'res5c_branch2b_offset')
.deform_conv(3, 3, 512, 1, 1, biased=False, rate=2, relu=False, num_deform_group=4, name='res5c_branch2b')
.batch_normalization(relu=False, name='bn5c_branch2b')
.relu(name='res5c_branch2b_relu')
.conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5c_branch2c', padding='VALID')
.batch_normalization(relu=False, name='bn5c_branch2c'))
(self.feed('res5b_relu', 'bn5c_branch2c')
.add(name='res5c')
.relu(name='res5c_relu')
.conv(1, 1, 256, 1, 1, relu=False, name='conv_new_1')
.relu(name='conv_new_1_relu'))
(self.feed('conv_new_1_relu', 'roi-data')
.deform_psroi_pool(group_size=1, pooled_size=7, sample_per_part=4, no_trans=True, part_size=7, output_dim=256, trans_std=1e-1, spatial_scale=0.0625, name='offset_t')
# .flatten_data(name='offset_flatten')
.fc(num_out=7 * 7 * 2, name='offset', relu=False)
.reshape(shape=(-1,2,7,7), name='offset_reshape'))
(self.feed('conv_new_1_relu', 'roi-data', 'offset_reshape')
.deform_psroi_pool(group_size=1, pooled_size=7, sample_per_part=4, no_trans=False, part_size=7, output_dim=256, trans_std=1e-1, spatial_scale=0.0625, name='deformable_roi_pool')
.fc(num_out=1024, name='fc_new_1')
.fc(num_out=1024, name='fc_new_2'))
(self.feed('fc_new_2')
.fc(num_out=n_classes, name='cls_score', relu=False)
.softmax(name='cls_prob'))
(self.feed('fc_new_2')
.fc(num_out=4*n_classes, name='bbox_pred', relu=False))
# (self.feed('res4f_relu','roi-data')
# .roi_pool(7,7,1.0/16,name='res5a_branch2a_roipooling')
# .conv(1, 1, 512, 2, 2, biased=False, relu=False, name='res5a_branch2a', padding='VALID')
# .batch_normalization(relu=True, name='bn5a_branch2a',is_training=False)
# .conv(3, 3, 512, 1, 1, biased=False, relu=False, name='res5a_branch2b')
# .batch_normalization(relu=True, name='bn5a_branch2b',is_training=False)
# .conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5a_branch2c')
# .batch_normalization(name='bn5a_branch2c',is_training=False,relu=False))
# (self.feed('res5a_branch2a_roipooling')
# .conv(1,1,2048,2,2,biased=False, relu=False, name='res5a_branch1', padding='VALID')
# .batch_normalization(name='bn5a_branch1',is_training=False,relu=False))
# (self.feed('bn5a_branch2c','bn5a_branch1')
# .add(name='res5a')
# .relu(name='res5a_relu')
# .conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5b_branch2a')
# .batch_normalization(relu=True, name='bn5b_branch2a',is_training=False)
# .conv(3, 3, 512, 1, 1, biased=False, relu=False, name='res5b_branch2b')
# .batch_normalization(relu=True, name='bn5b_branch2b',is_training=False)
# .conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5b_branch2c')
# .batch_normalization(name='bn5b_branch2c',is_training=False,relu=False))
# #pdb.set_trace()
# (self.feed('res5a_relu',
# 'bn5b_branch2c')
# .add(name='res5b')
# .relu(name='res5b_relu')
# .conv(1, 1, 512, 1, 1, biased=False, relu=False, name='res5c_branch2a')
# .batch_normalization(relu=True, name='bn5c_branch2a',is_training=False)
# .conv(3, 3, 512, 1, 1, biased=False, relu=False, name='res5c_branch2b')
# .batch_normalization(relu=True, name='bn5c_branch2b',is_training=False)
# .conv(1, 1, 2048, 1, 1, biased=False, relu=False, name='res5c_branch2c')
# .batch_normalization(name='bn5c_branch2c',is_training=False,relu=False))
# #pdb.set_trace()
# (self.feed('res5b_relu',
# 'bn5c_branch2c')
# .add(name='res5c')
# .relu(name='res5c_relu')
# .fc(n_classes, relu=False, name='cls_score')
# .softmax(name='cls_prob'))
# (self.feed('res5c_relu')
# .fc(n_classes*4, relu=False, name='bbox_pred'))
| 58.080247
| 189
| 0.597619
| 2,416
| 18,818
| 4.447434
| 0.074089
| 0.020289
| 0.102932
| 0.109819
| 0.74053
| 0.709912
| 0.548813
| 0.541461
| 0.395905
| 0.384272
| 0
| 0.06968
| 0.240408
| 18,818
| 323
| 190
| 58.260062
| 0.682034
| 0.144064
| 0
| 0.021097
| 0
| 0
| 0.189634
| 0.009158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008439
| false
| 0
| 0.012658
| 0
| 0.025316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16b8947aeb5e92484b74a59f50dce7a8d1075f22
| 23,601
|
py
|
Python
|
dev/Tools/build/waf-1.7.13/lmbrwaflib/unit_test_lumberyard_modules.py
|
akulamartin/lumberyard
|
2d4be458a02845179be098e40cdc0c48f28f3b5a
|
[
"AML"
] | 8
|
2019-10-07T16:33:47.000Z
|
2020-12-07T03:59:58.000Z
|
dev/Tools/build/waf-1.7.13/lmbrwaflib/unit_test_lumberyard_modules.py
|
29e7e280-0d1c-4bba-98fe-f7cd3ca7500a/lumberyard
|
1c52b941dcb7d94341fcf21275fe71ff67173ada
|
[
"AML"
] | null | null | null |
dev/Tools/build/waf-1.7.13/lmbrwaflib/unit_test_lumberyard_modules.py
|
29e7e280-0d1c-4bba-98fe-f7cd3ca7500a/lumberyard
|
1c52b941dcb7d94341fcf21275fe71ff67173ada
|
[
"AML"
] | 4
|
2019-08-05T07:25:46.000Z
|
2020-12-07T05:12:55.000Z
|
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
from waflib import Errors
import lumberyard_modules
import unittest
import pytest
import utils
class FakeContext(object):
    """Bare stand-in for a waf build context; tests attach attributes ad hoc."""
class FakeIncludeSettings(object):
    """Bare stand-in for an included settings object returned by the context."""
class FakePlatformSettings(object):
    """Stand-in for a platform settings object.

    Holds the platform name and a set of platform aliases.
    """
    def __init__(self, platform_name, aliases=None):
        # `aliases=set()` (the old default) is a mutable default argument:
        # one shared set object across every instance. Build a fresh,
        # per-instance empty set instead; an explicitly passed set is stored
        # as-is, matching the previous behavior for callers.
        self.platform = platform_name
        self.aliases = set() if aliases is None else aliases
class FakeConfigurationSettings(object):
    """Stand-in for configuration settings: a name plus an optional base config."""
    def __init__(self, settings_name, base_config=None):
        self.name = settings_name
        self.base_config = base_config
class FakeConfiguration(object):
    """Stand-in for a build configuration: settings plus test/server flags."""
    def __init__(self, settings, is_test=False, is_server=False):
        self.is_server = is_server
        self.is_test = is_test
        self.settings = settings
@pytest.fixture()
def mock_parse_json(mock_json_map):
    """Swap utils.parse_json_file for a dict-backed fake for the test's duration.

    `mock_json_map` comes from the test's parametrization; a falsy value
    falls back to an empty settings file at key 'path'.
    """
    json_map = mock_json_map or {'path': {}}
    def _fake_parse_json(path, _):
        return json_map[path]
    saved_parse_json = utils.parse_json_file
    utils.parse_json_file = _fake_parse_json
    yield
    # Teardown: restore the real parser.
    utils.parse_json_file = saved_parse_json
@pytest.fixture()
def fake_context():
    """Provide a fresh FakeContext for each test."""
    ctx = FakeContext()
    return ctx
def test_SanitizeKWInput_SimpleKwDictionary_Success():
    """sanitize_kw_input wraps a bare string value into a one-element list."""
    kw = {'libpath': 'mylib'}
    lumberyard_modules.sanitize_kw_input(kw)
    libpath = kw['libpath']
    assert isinstance(libpath, list)
    assert libpath[0] == 'mylib'
def test_SanitizeKWInput_SimpleKwDictionaryInAdditionalSettings_Success():
    """sanitize_kw_input also sanitizes nested additional_settings dicts."""
    kw = {
        'libpath': 'mylib',
        'additional_settings': {'stlibpath': 'mystlib'},
    }
    lumberyard_modules.sanitize_kw_input(kw)
    assert isinstance(kw['libpath'], list)
    assert kw['libpath'][0] == 'mylib'
    extra = kw['additional_settings']
    assert isinstance(extra, list)
    first = extra[0]
    assert isinstance(first, dict)
    assert isinstance(first['stlibpath'], list)
    assert first['stlibpath'][0] == 'mystlib'
@pytest.mark.parametrize(
    "target, kw_key, source_section, additional_aliases, merge_dict, expected, mock_json_map", [
        pytest.param('test_target', 'fake_key', {}, {}, {}, {}, None, id='MissingKeyInSourceNoChange'),
        pytest.param('test_target', 'fake_key', {'fake_key': 'fake_value'}, {}, {}, {'fake_key': 'fake_value'}, None, id='MissingKeyInTargetKeyAdded'),
        pytest.param('test_target', 'copyright_org', {'copyright_org': False}, {}, {'copyright_org': 'AMZN'}, type(Errors.WafError), None, id='InvalidStringKwInSourceError'),
        pytest.param('test_target', 'copyright_org', {'copyright_org': 'AMZN'}, {}, {'copyright_org': False}, type(Errors.WafError), None, id='InvalidStringKwInTargetError'),
        pytest.param('test_target', 'copyright_org', {'copyright_org': 'AMZN'}, {}, {'copyright_org': 'A2Z'}, {'copyright_org': 'AMZN'}, None, id='MergeStringReplaceSuccess'),
        pytest.param('test_target', 'client_only', {'client_only': 'False'}, {}, {'client_only': True}, type(Errors.WafError), None, id='InvalidBoolKwInSourceError'),
        pytest.param('test_target', 'client_only', {'client_only': False}, {}, {'client_only': 'True'}, type(Errors.WafError), None, id='InvalidBoolKwInTargetError'),
        pytest.param('test_target', 'client_only', {'client_only': False}, {}, {'client_only': True}, {'client_only': False}, None, id='MergeBoolReplaceKwSuccess'),
    ])
def test_ProjectSettingsFileMergeKwKey_ValidInputs(mock_parse_json, target, kw_key, source_section, additional_aliases, merge_dict, expected, mock_json_map):
    """merge_kw_key merges one key from source_section into merge_dict.

    Fix: the mock_parse_json fixture depends on a `mock_json_map` value, which
    this test previously neither parametrized nor received from any visible
    fixture, so collecting it would fail with a missing-fixture error (masked
    because a later test re-used this function's name and shadowed it).
    `mock_json_map=None` makes the fixture fall back to its default map.

    NOTE(review): `expected == type(Errors.WafError)` (i.e. the metaclass
    `type`) is used as a sentinel meaning "expect a WafError to be raised";
    dict values mean "assert the merged result equals this".
    """
    fake_context = FakeContext()
    test_settings = lumberyard_modules.ProjectSettingsFile(fake_context, 'path', additional_aliases)
    if isinstance(expected, dict):
        # Success path: merge and compare the resulting kw dict.
        test_settings.merge_kw_key(target=target,
                                   kw_key=kw_key,
                                   source_section=source_section,
                                   merge_kw=merge_dict)
        assert merge_dict == expected
    elif isinstance(expected, type(Errors.WafError)):
        # Error path: invalid type combinations must raise WafError.
        with pytest.raises(Errors.WafError):
            test_settings.merge_kw_key(target=target,
                                       kw_key=kw_key,
                                       source_section=source_section,
                                       merge_kw=merge_dict)
@pytest.mark.parametrize(
    "test_dict, fake_include_settings, mock_json_map, additional_aliases, expected", [
        pytest.param({}, None, None, {}, {}, id='BasicNoAdditionalAliasNoAdditionalIncludes'),
        pytest.param({}, 'include_test',
                     {
                         'path': {
                             'includes': ['include_test']
                         },'include_test': {}
                     }, {}, {'includes': ['include_test']}, id='BasicNoAdditionalAliasSingleAdditionalIncludes')
    ])
def test_ProjectSettingsFileIncludes_ValidInputs(mock_parse_json, fake_context, test_dict, fake_include_settings, mock_json_map, additional_aliases, expected):
    """Constructing a ProjectSettingsFile resolves 'includes' entries through
    the context's get_project_settings_file hook and keeps them in `dict`.

    Fix: renamed from test_ProjectSettingsFileMergeKwKey_ValidInputs — that
    name collided with the merge_kw_key test defined above, so Python kept
    only this definition and the earlier test was silently never collected.
    """
    if fake_include_settings:
        # Stub the include-resolution hook and verify it is asked for the
        # expected include file name.
        def _mock_get_project_settings_file(include_settings_file, additional_aliases):
            assert fake_include_settings == include_settings_file
            fake_settings = FakeIncludeSettings()
            return fake_settings
        fake_context.get_project_settings_file = _mock_get_project_settings_file
    test = lumberyard_modules.ProjectSettingsFile(fake_context,
                                                  'path',
                                                  additional_aliases)
    assert test.dict == expected
@pytest.mark.parametrize(
    "mock_json_map, additional_aliases, section_key, expected", [
        pytest.param(None, {}, 'no_section', {}, id='SimpleNoChange'),
        pytest.param({
            'path': {
                "test_section": {
                    "key1": "value1"
                }
            }
        }, {}, 'test_section', {'key1': 'value1'}, id='SimpleChanges')
    ])
def test_ProjectSettingsFileMergeKwSection_ValidInputs_Success(mock_parse_json, fake_context, mock_json_map, additional_aliases, section_key, expected):
    """merge_kw_section copies the named section's keys into merge_kw; a
    missing section leaves merge_kw untouched."""
    settings = lumberyard_modules.ProjectSettingsFile(fake_context, 'path', additional_aliases)
    result = {}
    settings.merge_kw_section(section_key=section_key, target='test_target', merge_kw=result)
    assert expected == result
class ProjectSettingsTest(unittest.TestCase):
    """unittest-based tests for ProjectSettingsFile.merge_kw_dict.

    utils.parse_json_file is patched in setUp/tearDown so settings content is
    served from an in-memory map instead of real files.
    """

    def setUp(self):
        # Patch the JSON reader; restored in tearDown.
        self.old_parse_json = utils.parse_json_file
        utils.parse_json_file = self.mockParseJson
        self.mock_json_map = {}

    def tearDown(self):
        utils.parse_json_file = self.old_parse_json

    def mockParseJson(self, path, _):
        # Serve parsed-JSON content from the in-memory map, keyed by path.
        return self.mock_json_map[path]

    def createSimpleSettings(self, fake_context=None, test_dict=None, additional_aliases=None):
        """Build a ProjectSettingsFile whose 'path' content is test_dict.

        Fix: the previous defaults (fake_context=FakeContext(), test_dict={},
        additional_aliases={}) were mutable default arguments, evaluated once
        at definition time and shared by every call; each call now gets fresh
        objects. Explicit arguments behave exactly as before.
        """
        if fake_context is None:
            fake_context = FakeContext()
        if test_dict is None:
            test_dict = {}
        if additional_aliases is None:
            additional_aliases = {}
        self.mock_json_map = {'path': test_dict}
        test_settings = lumberyard_modules.ProjectSettingsFile(fake_context, 'path', additional_aliases)
        return test_settings

    def _collectMergedSections(self, configuration, aliases=frozenset()):
        """Run merge_kw_dict for platform 'test_platform' over empty settings
        and return the set of section keys it attempted to merge.

        Shared driver for the merge_kw_dict tests below, which previously
        duplicated this fake-context/mock wiring verbatim.
        """
        fake_context = FakeContext()
        fake_platform_settings = FakePlatformSettings(platform_name='test_platform',
                                                      aliases=set(aliases))

        def _mock_get_platform_settings(platform):
            self.assertEqual(platform, 'test_platform')
            return fake_platform_settings
        fake_context.get_platform_settings = _mock_get_platform_settings

        test_settings = self.createSimpleSettings(fake_context=fake_context,
                                                  test_dict={})
        sections_merged = set()

        def _mock_merge_kw_section(section, target, merge_kw):
            # Record each section key merge_kw_dict tries to merge.
            sections_merged.add(section)
        test_settings.merge_kw_section = _mock_merge_kw_section

        test_settings.merge_kw_dict(target='test_target',
                                    merge_kw={},
                                    platform='test_platform',
                                    configuration=configuration)
        return sections_merged

    def test_ProjectSettingsFileMergeKwDict_RecursiveMergeAdditionalSettingsNoPlatformNoConfiguration_Success(self):
        """
        Test scenario:
        Setup a project settings that contains other project settings, so that it can recursively call merge_kw_dict
        recursively
        """
        include_settings_file = 'include_test'
        test_settings_single_include = {'includes': [include_settings_file]}
        test_empty_settings = {}
        test_merge_kw_key = 'passed'
        test_merge_kw_value = True
        self.mock_json_map = {'path': test_settings_single_include,
                              include_settings_file: test_empty_settings}

        # Prepare a mock include settings object whose merge_kw_dict marks
        # the merge_kw so the recursion can be observed.
        test_include_settings = self.createSimpleSettings()

        def _mock_merge_kw_dict(target, merge_kw, platform, configuration):
            merge_kw[test_merge_kw_key] = test_merge_kw_value
        test_include_settings.merge_kw_dict = _mock_merge_kw_dict

        # Prepare a mock context that resolves any include to the mock above.
        fake_context = FakeContext()

        def _mock_get_project_settings_file(_a, _b):
            return test_include_settings
        fake_context.get_project_settings_file = _mock_get_project_settings_file

        test_settings = self.createSimpleSettings(fake_context=fake_context,
                                                  test_dict=test_settings_single_include)
        test_merge_kw = {}
        test_settings.merge_kw_dict(target='test_target',
                                    merge_kw=test_merge_kw,
                                    platform=None,
                                    configuration=None)
        self.assertIn(test_merge_kw_key, test_merge_kw)
        self.assertEqual(test_merge_kw[test_merge_kw_key], test_merge_kw_value)

    def test_ProjectSettingsFileMergeKwDict_MergePlatformSection_Success(self):
        """
        Test scenario:
        Test the merge_kw_dict when only platform is set and not any configurations
        """
        test_platform = 'test_platform'
        test_alias = 'alias_1'
        sections_merged = self._collectMergedSections(configuration=None,
                                                      aliases={test_alias})
        # Validate all the sections passed to the merge_kw_dict
        self.assertIn('{}/*'.format(test_platform), sections_merged)
        self.assertIn('{}/*'.format(test_alias), sections_merged)
        self.assertEqual(len(sections_merged), 2)

    def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedNoTestNoDedicatedSection_Success(self):
        """
        Test scenario:
        Test the merge_kw_dict when the platform + configuration is set, and the configuration is not a test nor
        server configuration
        """
        test_platform_name = 'test_platform'
        test_configuration_name = 'test_configuration'
        test_configuration = FakeConfiguration(settings=FakeConfigurationSettings(settings_name=test_configuration_name))
        sections_merged = self._collectMergedSections(test_configuration)
        # Validate all the sections passed to the merge_kw_dict
        self.assertIn('{}/*'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertEqual(len(sections_merged), 2)

    def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationDerivedNoTestNoDedicatedSection_Success(self):
        """
        Test scenario:
        Test the merge_kw_dict when the platform + configuration is set, and the configuration is not a test nor
        server configuration, but is derived from another configuration
        """
        test_platform_name = 'test_platform'
        test_configuration_name = 'test_configuration'
        base_test_configuration_name = 'base_configuration'
        test_configuration = FakeConfiguration(
            settings=FakeConfigurationSettings(settings_name=test_configuration_name,
                                               base_config=FakeConfiguration(FakeConfigurationSettings(settings_name=base_test_configuration_name))))
        sections_merged = self._collectMergedSections(test_configuration)
        # Validate all the sections passed to the merge_kw_dict
        self.assertIn('{}/*'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('{}/{}'.format(test_platform_name, base_test_configuration_name), sections_merged)
        self.assertEqual(len(sections_merged), 3)

    def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedTestDedicatedSection_Success(self):
        """
        Test scenario:
        Test the merge_kw_dict when the platform + configuration is set, and the configuration is a test and a
        server configuration
        """
        test_platform_name = 'test_platform'
        test_configuration_name = 'test_configuration'
        test_configuration = FakeConfiguration(settings=FakeConfigurationSettings(settings_name=test_configuration_name),
                                               is_test=True,
                                               is_server=True)
        sections_merged = self._collectMergedSections(test_configuration)
        # Validate all the sections passed to the merge_kw_dict
        self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/dedicated,test', sections_merged)
        self.assertIn('{}/*/dedicated,test'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/dedicated,test'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/test,dedicated', sections_merged)
        self.assertIn('{}/*/test,dedicated'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/test,dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertEqual(len(sections_merged), 8)

    def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedTestNoDedicatedSection_Success(self):
        """
        Test scenario:
        Test the merge_kw_dict when the platform + configuration is set, and the configuration is a test but not a
        server configuration
        """
        test_platform_name = 'test_platform'
        test_configuration_name = 'test_configuration'
        test_configuration = FakeConfiguration(
            settings=FakeConfigurationSettings(settings_name=test_configuration_name),
            is_test=True,
            is_server=False)
        sections_merged = self._collectMergedSections(test_configuration)
        # Validate all the sections passed to the merge_kw_dict
        self.assertIn('{}/*'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/test', sections_merged)
        self.assertIn('{}/*/test'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/test'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/dedicated,test', sections_merged)
        self.assertIn('{}/*/dedicated,test'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/dedicated,test'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/test,dedicated', sections_merged)
        self.assertIn('{}/*/test,dedicated'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/test,dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertEqual(len(sections_merged), 11)

    def test_ProjectSettingsFileMergeKwDict_MergePlatformConfigurationNoDerivedNoTestDedicatedSection_Success(self):
        """
        Test scenario:
        Test the merge_kw_dict when the platform + configuration is set, and the configuration is a server but not a
        test configuration
        """
        test_platform_name = 'test_platform'
        test_configuration_name = 'test_configuration'
        test_configuration = FakeConfiguration(
            settings=FakeConfigurationSettings(settings_name=test_configuration_name),
            is_test=False,
            is_server=True)
        sections_merged = self._collectMergedSections(test_configuration)
        # Validate all the sections passed to the merge_kw_dict
        self.assertIn('{}/*'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/dedicated', sections_merged)
        self.assertIn('{}/*/dedicated'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/dedicated,test', sections_merged)
        self.assertIn('{}/*/dedicated,test'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/dedicated,test'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertIn('*/*/test,dedicated', sections_merged)
        self.assertIn('{}/*/test,dedicated'.format(test_platform_name), sections_merged)
        self.assertIn('{}/{}/test,dedicated'.format(test_platform_name, test_configuration_name), sections_merged)
        self.assertEqual(len(sections_merged), 11)
| 43.304587
| 191
| 0.637897
| 2,345
| 23,601
| 6.03838
| 0.088699
| 0.04202
| 0.046328
| 0.055085
| 0.724929
| 0.697387
| 0.673234
| 0.662076
| 0.642514
| 0.62959
| 0
| 0.001173
| 0.277785
| 23,601
| 544
| 192
| 43.384191
| 0.829569
| 0.079403
| 0
| 0.581662
| 0
| 0
| 0.099921
| 0.014941
| 0
| 0
| 0
| 0
| 0.17765
| 1
| 0.106017
| false
| 0.028653
| 0.014327
| 0.011461
| 0.17192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16cdaac129cd705700eab605365385f7b7b8a82c
| 2,236
|
py
|
Python
|
pottan_ocr/utils.py
|
nithyadurai87/pottan-ocr-tamil
|
e455891dc0ddd508d1318abf84fc59cc548873f7
|
[
"MIT"
] | 5
|
2019-05-05T18:26:14.000Z
|
2019-08-02T05:04:12.000Z
|
pottan_ocr/utils.py
|
nithyadurai87/pottan-ocr-tamil
|
e455891dc0ddd508d1318abf84fc59cc548873f7
|
[
"MIT"
] | 3
|
2020-07-17T02:28:11.000Z
|
2021-05-08T21:58:10.000Z
|
pottan_ocr/utils.py
|
nithyadurai87/pottan-ocr-tamil
|
e455891dc0ddd508d1318abf84fc59cc548873f7
|
[
"MIT"
] | 3
|
2020-04-11T19:39:08.000Z
|
2020-12-21T08:44:21.000Z
|
import torch
import json
import numpy as np
from torch.autograd import Variable
import gzip
import yaml
from re import split
from matplotlib import pyplot
def showImg(im):
    """Display the image array *im* in a matplotlib window."""
    pyplot.imshow(im)
    pyplot.show()
def myOpen(fname, mode):
    """Open *fname* in *mode*, always using UTF-8 for the text encoding."""
    return open(fname, mode=mode, encoding="utf-8")
def readFile(fname):
    """Return the entire contents of *fname* as a string.

    Files whose name ends in '.gz' are transparently decompressed and read
    as text; everything else is read as plain text.
    """
    # str.endswith is the idiomatic (and off-by-one-proof) way to test the
    # suffix, instead of the original fname[-3:] slice comparison.
    if fname.endswith('.gz'):
        opener, mode = gzip.open, 'rt'
    else:
        opener, mode = open, 'r'
    with opener(fname, mode) as f:
        return f.read()
def readLines(fname):
    """Split the contents of *fname* on every CR or LF character.

    NOTE(review): CRLF line endings produce empty entries, because '\r' and
    '\n' are treated as two separate split points — confirm callers expect
    that.
    """
    contents = readFile(fname)
    return split('[\r\n]', contents)
def readJson(fname):
    """Parse *fname* as UTF-8 JSON and return the decoded object."""
    with myOpen(fname, 'r') as fp:
        return json.load(fp)
def writeFile(fname, contents):
    """Write the string *contents* to *fname* as UTF-8 text."""
    with myOpen(fname, 'w') as fp:
        fp.write(contents)
def writeJson(fname, data):
    """Serialize *data* as JSON into *fname* (UTF-8)."""
    with myOpen(fname, 'w') as fp:
        json.dump(data, fp)
def readYaml(fname):
    """Parse *fname* as YAML and return the decoded object.

    Uses yaml.safe_load: calling yaml.load without an explicit Loader is
    deprecated since PyYAML 5.1, and the full loader can construct arbitrary
    Python objects from untrusted input.  A plain config file needs only the
    safe subset.
    """
    with myOpen(fname, 'r') as fp:
        return yaml.safe_load(fp)
# Global configuration, loaded once at import time from the current working
# directory — importing this module therefore requires ./config.yaml to exist.
config = readYaml('./config.yaml')
class averager(object):
    """Compute average for `torch.Variable`, `torch.Tensor`, or plain numbers."""

    def __init__(self):
        """Start with an empty accumulator."""
        self.reset()

    def add(self, v):
        """Accumulate the value(s) in *v*.

        Tensors and Variables contribute one sample per element; any other
        numeric value counts as a single sample.
        """
        if isinstance(v, Variable):
            count = v.data.numel()
            v = v.data.sum()
        elif isinstance(v, torch.Tensor):
            count = v.numel()
            v = v.sum()
        else:
            # Bug fix: a plain int/float previously left `count` unbound,
            # raising NameError on the next line.
            count = 1
        self.n_count += count
        self.sum += v

    def reset(self):
        """Clear the accumulated sum and sample count."""
        self.n_count = 0
        self.sum = 0

    def val(self):
        """Return the mean of everything added so far, or 0 if empty."""
        res = 0
        if self.n_count != 0:
            res = self.sum / float(self.n_count)
        return res
def loadTrainedModel( model, opt ):
    """Load a pretrained model into given model

    model -- a torch module whose parameters are overwritten in place via
             load_state_dict.
    opt   -- options object; reads `opt.crnn` (checkpoint path) and
             `opt.cuda` (truthy: load as saved; falsy: remap GPU-0 storage
             to the CPU).
    """
    print('loading pretrained model from %s' % opt.crnn)
    if( opt.cuda ):
        stateDict = torch.load(opt.crnn )
    else:
        # Remap tensors saved on GPU 0 so the checkpoint loads on CPU-only hosts.
        stateDict = torch.load(opt.crnn, map_location={'cuda:0': 'cpu'} )
    # Handle the case of some old torch version. It will save the data as module.<xyz> . Handle it
    # Keys are snapshotted with list() so the dict can be mutated (rename +
    # delete) while iterating without a RuntimeError.
    if( list( stateDict.keys() )[0][:7] == 'module.' ):
        for key in list(stateDict.keys()):
            stateDict[ key[ 7:] ] = stateDict[key]
            del stateDict[ key ]
    model.load_state_dict( stateDict )
    print('Completed loading pre trained model')
| 24.304348
| 99
| 0.58542
| 301
| 2,236
| 4.312292
| 0.38206
| 0.042373
| 0.046225
| 0.030817
| 0.101695
| 0.035439
| 0
| 0
| 0
| 0
| 0
| 0.006231
| 0.2822
| 2,236
| 91
| 100
| 24.571429
| 0.802492
| 0.086315
| 0
| 0
| 0
| 0
| 0.057579
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.203125
| false
| 0
| 0.125
| 0.03125
| 0.4375
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16cf7d6d5783bc8dc6f881f5646090c8b7e4317c
| 7,584
|
py
|
Python
|
population_estimator/curses_io.py
|
cruzanta/population-estimator
|
cb56c551b615726543d8b1643302be2d30fd593c
|
[
"MIT"
] | 1
|
2019-02-10T01:30:09.000Z
|
2019-02-10T01:30:09.000Z
|
population_estimator/curses_io.py
|
cruzantada/population-estimator
|
cb56c551b615726543d8b1643302be2d30fd593c
|
[
"MIT"
] | null | null | null |
population_estimator/curses_io.py
|
cruzantada/population-estimator
|
cb56c551b615726543d8b1643302be2d30fd593c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Module for painting output on and obtaining input from a text-based terminal
window using the curses library.
"""
import curses
import textwrap
def display_string(screen, a_string, output_line):
    """Paint *a_string* at row *output_line*, wrapped to the window width.

    If curses rejects the write (window too small), a resize warning is
    painted at the top of the window instead.  Returns *output_line*
    unchanged either way.
    """
    _, width = screen.getmaxyx()
    wrapped = textwrap.fill(a_string, width - 1)
    try:
        screen.addstr(output_line, 0, wrapped)
    except curses.error:
        warning = 'Terminal window too small for output! Please resize. '
        screen.addstr(0, 0, textwrap.fill(warning, width - 1))
    return output_line
def display_list_items(screen, a_list, output_line):
    """Paint each item of *a_list* on its own line; return the next free line."""
    for element in a_list:
        output_line = display_string(screen, '%s' % (element), output_line) + 1
    return output_line
def display_formatted_dict(screen, dct, output_line):
    """Paint 'key: value' for every pair of *dct*, one pair per line.

    Integer values are rendered with thousands separators.
    Returns the next free line number.
    """
    for key, value in dct.items():
        if isinstance(value, int):
            value = '{:,}'.format(value)
        line_text = '%s: %s' % (key, value)
        output_line = display_string(screen, line_text, output_line) + 1
    return output_line
def display_string_with_prompt(screen, first_line_num, a_string, prompt):
    """Clear the window, paint *a_string* then *prompt*, and read input.

    Args:
        screen: A window object for the text-based terminal window.
        first_line_num: Row where the first string starts.
        a_string: Text painted first.
        prompt: Text inviting the user to type; input is read just after it.

    Returns:
        The string the user enters.
    """
    screen.clear()
    line = display_string(screen, a_string, first_line_num)
    line = display_string(screen, prompt, line + 3)
    screen.refresh()
    return screen.getstr(line, len(prompt) + 1)
def display_list_items_with_prompt(screen, first_line_num, a_string, a_list,
                                   prompt):
    """Clear the window, paint *a_string*, the list items, then *prompt*,
    and read input.

    Args:
        screen: A window object for the text-based terminal window.
        first_line_num: Row where the first string starts.
        a_string: Text painted first.
        a_list: Items painted one per line below the first string.
        prompt: Text inviting the user to type; input is read just after it.

    Returns:
        The string the user enters.
    """
    screen.clear()
    line = display_string(screen, a_string, first_line_num)
    line = display_list_items(screen, a_list, line + 2)
    line = display_string(screen, prompt, line + 1)
    screen.refresh()
    return screen.getstr(line, len(prompt) + 1)
def display_formatted_dicts_with_prompt(screen, first_line_num, a_string,
                                        list_of_dicts, prompt):
    """Clear the window, paint *a_string*, each dict's pairs, then *prompt*,
    and read input.

    Each key, value pair of each dict is painted on its own line, key and
    value separated by a colon, with a blank line between dicts.

    Args:
        screen: A window object for the text-based terminal window.
        first_line_num: Row where the first string starts.
        a_string: Text painted first.
        list_of_dicts: Dictionaries whose pairs are painted line by line.
        prompt: Text inviting the user to type; input is read just after it.

    Returns:
        The string the user enters.
    """
    screen.clear()
    line = display_string(screen, a_string, first_line_num)
    line += 2
    for one_dict in list_of_dicts:
        line = display_formatted_dict(screen, one_dict, line) + 1
    line = display_string(screen, prompt, line + 1)
    screen.refresh()
    return screen.getstr(line, len(prompt) + 1)
def get_user_menu_selection(screen, first_line_num, a_string, menu_items,
                            prompt):
    """Paints a string, a numbered menu, and accepts input.

    Paints *a_string*, then each item of *menu_items* as a numbered menu
    option, then *prompt*.  Loops until the user enters one of the menu
    numbers.

    Args:
        screen: A window object that represents the text-based terminal window.
        first_line_num: An integer row where the first string starts.
        a_string: The first string that is painted on the terminal window.
        menu_items: A list whose items are painted as numbered menu options.
        prompt: A string asking the user for a number from the menu.

    Returns:
        The item in 'menu_items' that the user selects.
    """
    # Map menu number (as a string) -> item, numbered in display order.
    selection_items = {}
    for item_key, item in enumerate(menu_items, start=1):
        selection_items['%s' % (item_key)] = item

    # Display the menu and prompt the user for a selection.
    while True:
        screen.clear()
        output_line = display_string(screen, a_string, first_line_num)
        output_line += 3
        # Bug fix: dict.iterkeys() is Python 2 only; sorted(dict) works on
        # both and preserves the original lexicographic ordering.
        for menu_num in sorted(selection_items):
            item_line = '%s) %s' % (menu_num, selection_items[menu_num])
            output_line = display_string(screen, item_line, output_line)
            output_line += 1
        output_line += 1
        output_line = display_string(screen, prompt, output_line)
        screen.refresh()
        # Renamed from `input`, which shadowed the builtin.
        # NOTE(review): on Python 3 getstr() returns bytes, so the membership
        # test below would need a .decode() — confirm the target runtime.
        selection = screen.getstr(output_line, len(prompt) + 1)
        if selection in selection_items:
            return selection_items[selection]
        # Otherwise loop: force the user to enter a valid selection.
| 36.114286
| 80
| 0.676292
| 1,118
| 7,584
| 4.447227
| 0.12254
| 0.104586
| 0.044449
| 0.055511
| 0.736726
| 0.707763
| 0.673572
| 0.620676
| 0.570193
| 0.55712
| 0
| 0.003944
| 0.264504
| 7,584
| 209
| 81
| 36.287081
| 0.887415
| 0.520042
| 0
| 0.493671
| 0
| 0
| 0.021778
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088608
| false
| 0
| 0.025316
| 0
| 0.202532
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16d1b5218231a945c48c3095503b717e135149a2
| 7,987
|
py
|
Python
|
tests/test_transliterate.py
|
abosoar/camel_tools
|
0a92c06f6dde0063e26df5cbe4d74c2f99b418e0
|
[
"MIT"
] | 1
|
2021-03-23T12:50:47.000Z
|
2021-03-23T12:50:47.000Z
|
tests/test_transliterate.py
|
KaoutharMokrane/camel_tools
|
e9099907835b05d448362bce2cb0e815ac7f5590
|
[
"MIT"
] | null | null | null |
tests/test_transliterate.py
|
KaoutharMokrane/camel_tools
|
e9099907835b05d448362bce2cb0e815ac7f5590
|
[
"MIT"
] | 1
|
2021-01-24T05:06:33.000Z
|
2021-01-24T05:06:33.000Z
|
# -*- coding: utf-8 -*-
# MIT License
#
# Copyright 2018-2020 New York University Abu Dhabi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Tests for camel_tools.transliterate.
"""
from __future__ import absolute_import
import pytest
from camel_tools.utils.charmap import CharMapper
from camel_tools.utils.transliterate import Transliterator
# A mapper that translates lower-case English characters to a lower-case x and
# upper-case English characters to an upper-case X. This makes it easy to
# predict what the transliteration should be.
TEST_MAP = {
    u'A-Z': u'X',
    u'a-z': u'x',
}

# Shared CharMapper instance used by every test case below.
TEST_MAPPER = CharMapper(TEST_MAP, None)
class TestTransliteratorInit(object):
    """Test class for Transliterator.__init__ (mapper and marker validation).
    """

    def test_init_none_mapper(self):
        """Test that init raises a TypeError when given a mapper that is None.
        """
        with pytest.raises(TypeError):
            Transliterator(None)

    def test_init_invalid_type_mapper(self):
        """Test that init raises a TypeError when given a mapper that is not a
        CharMapper instance.
        """
        with pytest.raises(TypeError):
            Transliterator({})

    def test_init_valid_mapper(self):
        """Test that init doesn't raise an error when given a valid mapper.
        """
        assert Transliterator(TEST_MAPPER)

    def test_init_none_marker(self):
        """Test that init raises a TypeError when given a marker that is None.
        """
        with pytest.raises(TypeError):
            Transliterator(TEST_MAPPER, None)

    def test_init_invalid_type_marker(self):
        """Test that init raises a TypeError when given a marker that is not a
        string.
        """
        with pytest.raises(TypeError):
            Transliterator(TEST_MAPPER, [])

    def test_init_empty_marker(self):
        """Test that init raises a ValueError when given a marker that is an
        empty string.
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, '')

    def test_init_invalid_marker1(self):
        """Test that init raises a ValueError when given an invalid marker
        (whitespace in the middle).
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, '@@LAT @@')

    def test_init_invalid_marker2(self):
        """Test that init raises a ValueError when given an invalid marker
        (whitespace at the end).
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, '@@LAT@@ ')

    def test_init_invalid_marker3(self):
        """Test that init raises a ValueError when given an invalid marker
        (whitespace at the beginning).
        """
        with pytest.raises(ValueError):
            Transliterator(TEST_MAPPER, ' @@LAT@@')

    def test_init_valid_marker1(self):
        """Test that init doesn't raise an error when given a valid marker.
        """
        assert Transliterator(TEST_MAPPER, '@@LAT@@')

    def test_init_valid_marker2(self):
        """Test that init doesn't raise an error when given a valid (unicode)
        marker.
        """
        assert Transliterator(TEST_MAPPER, u'@@LAT@@')
class TestTransliteratorTranslate(object):
    """Test class for Transliterator.translate.

    All cases use '@@' as the marker prefix; tokens starting with it are
    excluded from transliteration unless ignore_markers is set.
    """

    def test_trans_empty(self):
        """Test that transliterating an empty string returns an empty string.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'') == u''

    def test_trans_single_no_markers(self):
        """Test that a single word with no markers gets transliterated.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'Hello') == u'Xxxxx'

    def test_trans_single_with_markers(self):
        """Test that a single word with markers does not get transliterated.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello') == u'@@Hello'

    def test_trans_single_strip(self):
        """Test that a single word with markers does not get transliterated
        but markers do get stripped when strip_markers is set to True.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello', True) == u'Hello'

    def test_trans_single_ignore(self):
        """Test that a single word with markers gets transliterated when ignore
        markers is set to True.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello', False, True) == u'@@Xxxxx'

    def test_trans_single_ignore_strip(self):
        """Test that a single word with markers gets transliterated with
        markers stripped when both strip_markers and ignore_markers are set to
        True.
        """
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(u'@@Hello', True, True) == u'Xxxxx'

    def test_trans_sent_no_markers(self):
        """Test that a sentence with no markers gets transliterated.
        """
        sent_orig = u'Hello World, this is a sentence!'
        sent_out = u'Xxxxx Xxxxx, xxxx xx x xxxxxxxx!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig) == sent_out

    def test_trans_sent_with_markers(self):
        """Test that tokens with markers in a sentence do not get
        transliterated.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx @@World, xxxx xx x @@sentence!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig) == sent_out

    def test_trans_sent_strip(self):
        """Test that tokens with markers in a sentence do not get
        transliterated but markers do get stripped when strip_markers is set
        to True.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx World, xxxx xx x sentence!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig, True) == sent_out

    def test_trans_sent_ignore(self):
        """Test that tokens with markers in a sentence get transliterated
        when ignore markers is set to True.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx @@Xxxxx, xxxx xx x @@xxxxxxxx!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig, False, True) == sent_out

    def test_trans_sent_ignore_strip(self):
        """Test that tokens with markers in a sentence get transliterated with
        markers stripped when both strip_markers and ignore_markers are set to
        True.
        """
        sent_orig = u'Hello @@World, this is a @@sentence!'
        sent_out = u'Xxxxx Xxxxx, xxxx xx x xxxxxxxx!'
        trans = Transliterator(TEST_MAPPER, '@@')
        assert trans.transliterate(sent_orig, True, True) == sent_out
| 33.700422
| 79
| 0.662076
| 1,035
| 7,987
| 4.97971
| 0.192271
| 0.02988
| 0.051222
| 0.034148
| 0.689756
| 0.650175
| 0.598564
| 0.576251
| 0.520567
| 0.510477
| 0
| 0.002331
| 0.248028
| 7,987
| 236
| 80
| 33.84322
| 0.855811
| 0.428446
| 0
| 0.357143
| 0
| 0
| 0.113724
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.261905
| false
| 0
| 0.047619
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16d397fdfd404f351b1fb42cfa6cff5538a49320
| 790
|
py
|
Python
|
00-Aulas/Aula007_2.py
|
AmandaRH07/Python_Entra21
|
4084962508f1597c0498d8b329e0f45e2ac55302
|
[
"MIT"
] | null | null | null |
00-Aulas/Aula007_2.py
|
AmandaRH07/Python_Entra21
|
4084962508f1597c0498d8b329e0f45e2ac55302
|
[
"MIT"
] | null | null | null |
00-Aulas/Aula007_2.py
|
AmandaRH07/Python_Entra21
|
4084962508f1597c0498d8b329e0f45e2ac55302
|
[
"MIT"
] | null | null | null |
# Functions for a menu-driven employee-registration demo.
# Header painted above every screen.
cabecalho = "SISTEMA DE CADASTRO DE FUNCIONARIO\n\n\n"
# Footer painted below every screen ("thank you for your preference").
rodape = "\n\n\n Obrigada pela preferencia"
def imprimir_tela(conteudo):
    """Print the standard header, the given screen content, then the footer."""
    for parte in (cabecalho, conteudo, rodape):
        print(parte)
def ler_opcoes():
    """Prompt the user for a menu option and return it as an int.

    Raises ValueError when the typed text is not an integer.
    """
    return int(input("Insira a opção: "))
def carregar_opcoes(opcao):
    """Show the screen matching the chosen menu option (1-5); otherwise do nothing."""
    mensagens = {
        1: "A opção escolhida foi 'Cadastrar funcionário'",
        2: "A opção escolhida foi 'Listar funcionários'",
        3: "A opção escolhida foi 'Editar funcionário'",
        4: "A opção escolhida foi 'Deletar funcionário'",
        5: "A opção escolhida foi 'Sair'",
    }
    if opcao in mensagens:
        imprimir_tela(mensagens[opcao])
| 27.241379
| 70
| 0.655696
| 101
| 790
| 5.039604
| 0.445545
| 0.141454
| 0.127701
| 0.176817
| 0.294695
| 0.294695
| 0
| 0
| 0
| 0
| 0
| 0.008292
| 0.236709
| 790
| 29
| 71
| 27.241379
| 0.835821
| 0.03038
| 0
| 0
| 0
| 0
| 0.378272
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136364
| false
| 0.045455
| 0
| 0
| 0.181818
| 0.136364
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16d79dca474781cfacdcca9ed1544b5e9e33234c
| 2,612
|
py
|
Python
|
src/richie/apps/courses/lms/edx.py
|
kernicPanel/richie
|
803deda3e29383ce85593e1836a3cf4efc6b847e
|
[
"MIT"
] | null | null | null |
src/richie/apps/courses/lms/edx.py
|
kernicPanel/richie
|
803deda3e29383ce85593e1836a3cf4efc6b847e
|
[
"MIT"
] | null | null | null |
src/richie/apps/courses/lms/edx.py
|
kernicPanel/richie
|
803deda3e29383ce85593e1836a3cf4efc6b847e
|
[
"MIT"
] | null | null | null |
"""
Backend to connect Open edX richie with an LMS
"""
import logging
import re
import requests
from requests.auth import AuthBase
from ..serializers import SyncCourseRunSerializer
from .base import BaseLMSBackend
logger = logging.getLogger(__name__)
def split_course_key(key):
    """Split an OpenEdX course key into (organization, course, run) codes.

    Version 1 keys look like "course-v1:org+course+run"; older keys look
    like "org/course/run".  We try the v1 form first and fall back to the
    old form.
    """
    v1_prefix = "course-v1:"
    if key.startswith(v1_prefix):
        parts = key[len(v1_prefix):].split("+")
    else:
        parts = key.split("/")
    organization, course, run = parts
    return organization, course, run
class EdXTokenAuth(AuthBase):
    """Attach HTTP token authentication to the given Request object."""

    def __init__(self, token):
        """Remember the API token for use on every request."""
        self.token = token

    def __call__(self, request):
        """Inject the token and content-type headers, then return the request."""
        auth_headers = {
            "X-Edx-Api-Key": self.token,
            "Content-Type": "application/json",
        }
        request.headers.update(auth_headers)
        return request
class TokenAPIClient(requests.Session):
    """
    A :class:`requests.Session` that automatically authenticates against edX's preferred
    authentication method up to Dogwood, given a secret token.
    For more usage details, see documentation of the :class:`requests.Session` object:
    https://requests.readthedocs.io/en/master/user/advanced/#session-objects
    """

    def __init__(self, token, *args, **kwargs):
        """Extending the session object by setting the authentication token.

        All remaining positional/keyword arguments are forwarded unchanged to
        requests.Session.__init__.
        """
        super().__init__(*args, **kwargs)
        # Every request made through this session is signed by EdXTokenAuth.
        self.auth = EdXTokenAuth(token)
class EdXLMSBackend(BaseLMSBackend):
    """LMS backend for Richie tested with Open EdX Dogwood to Hawthorn."""

    @property
    def api_client(self):
        """Instantiate and return an edx token API client.

        Reads the "API_TOKEN" key from self.configuration; a fresh client is
        built on every attribute access.
        """
        return TokenAPIClient(self.configuration["API_TOKEN"])

    def extract_course_id(self, url):
        """Extract the LMS course id from the course run url.

        NOTE(review): re.match returns None when "COURSE_REGEX" does not
        match *url*, so this raises AttributeError on unmatched urls —
        confirm callers expect that.
        """
        return re.match(self.configuration["COURSE_REGEX"], url).group("course_id")

    def extract_course_number(self, data):
        """Extract the LMS course number from data dictionary.

        The number is the middle component of the course key parsed out of
        data["resource_link"] by split_course_key.
        """
        course_id = self.extract_course_id(data.get("resource_link"))
        return split_course_key(course_id)[1]

    @staticmethod
    def get_course_run_serializer(data, partial=False):
        """Prepare data and return a bound SyncCourseRunSerializer."""
        return SyncCourseRunSerializer(data=data, partial=partial)
| 32.246914
| 88
| 0.68683
| 326
| 2,612
| 5.380368
| 0.407975
| 0.041049
| 0.035918
| 0.027366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002886
| 0.204058
| 2,612
| 80
| 89
| 32.65
| 0.840789
| 0.383231
| 0
| 0
| 0
| 0
| 0.06345
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.216216
| false
| 0
| 0.162162
| 0
| 0.621622
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16e4dfbf8bd61eccd8ee52165a28c0666d169326
| 840
|
py
|
Python
|
test_mnist.py
|
aidiary/chainer-siamese
|
6abce9192298e14682a7c766e2a5cdd10f519193
|
[
"MIT"
] | null | null | null |
test_mnist.py
|
aidiary/chainer-siamese
|
6abce9192298e14682a7c766e2a5cdd10f519193
|
[
"MIT"
] | null | null | null |
test_mnist.py
|
aidiary/chainer-siamese
|
6abce9192298e14682a7c766e2a5cdd10f519193
|
[
"MIT"
] | null | null | null |
import os
import chainer
import chainer.links as L
from net import SiameseNetwork
import numpy as np
import matplotlib.pyplot as plt

# Load the trained Siamese model saved by the training run.
model = SiameseNetwork()
chainer.serializers.load_npz(os.path.join('result', 'model.npz'), model)

# Load the MNIST test split.
_, test = chainer.datasets.get_mnist(ndim=3)
test_data, test_label = test._datasets

# Map the test data into the learned low-dimensional (2-D) space.
y = model.forward_once(test_data)
feat = y.data

# One plot color per digit label (0-9).
c = ['#ff0000', '#ffff00', '#00ff00', '#00ffff', '#0000ff',
     '#ff00ff', '#990000', '#999900', '#009900', '#009999']

# Plot each label in a different color.
# Success looks like instances of the same class clustering together
# while instances of different classes stay apart.
for i in range(10):
    f = feat[np.where(test_label == i)]
    plt.plot(f[:, 0], f[:, 1], '.', c=c[i])
plt.legend(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
plt.savefig(os.path.join('result', 'result.png'))
| 24.705882
| 72
| 0.667857
| 118
| 840
| 4.677966
| 0.618644
| 0.047101
| 0.036232
| 0.057971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07978
| 0.134524
| 840
| 33
| 73
| 25.454545
| 0.679505
| 0.140476
| 0
| 0
| 0
| 0
| 0.156863
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.315789
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
16e5abfcca6728651310e1b9d7d20815d0685476
| 5,535
|
py
|
Python
|
TwoFeetTempoMove.py
|
b0nz0/TwisterTempo
|
fc975af4095509d8ec4fe2f84313fe152577bed2
|
[
"MIT"
] | null | null | null |
TwoFeetTempoMove.py
|
b0nz0/TwisterTempo
|
fc975af4095509d8ec4fe2f84313fe152577bed2
|
[
"MIT"
] | null | null | null |
TwoFeetTempoMove.py
|
b0nz0/TwisterTempo
|
fc975af4095509d8ec4fe2f84313fe152577bed2
|
[
"MIT"
] | null | null | null |
from random import randrange, random
from time import time
import logging
from TwisterTempoGUI import TwisterTempoGUI
class TwoFeetTempoMove(object):
    """Generates randomized two-foot color moves in time with music beats."""
    # Color index -> name / RGB triple; indices are walked back and forth
    # (direction "FW"/"BW") so successive moves land on adjacent colors.
    COLORS_ALPHA = {0: 'RED', 1: 'BLUE', 2: 'YELLOW', 3: 'GREEN'}
    COLORS_RGB = {0: (255, 0, 0), 1: (0, 0, 255), 2: (255, 255, 0), 3: (0, 255, 0)}
    # Probabilities of the special moves drawn in beat_found().
    FOOT_CHANGE_PERC = 0.3
    FOOT_ON_AIR_PERC = 0.08
    FEET_ON_SAME_CIRCLE_PERC = 0.05
def __init__(self, min_delay=0, max_delay=100):
    """Set up random starting colors, the GUI, and beat-timing state.

    min_delay / max_delay bound the randomized delay between acted-on beats
    (see tempo_found_callback).
    """
    assert min_delay >= 0
    assert max_delay > 0
    self.min_delay = min_delay
    self.max_delay = max_delay
    self._last_beat_millis = 0
    color_names = TwoFeetTempoMove.COLORS_ALPHA
    num_colors = len(color_names)
    self._left_color = randrange(0, num_colors)
    self._right_color = randrange(0, num_colors)
    self._left_direction = "FW"
    self._right_direction = "FW"
    self._next_foot = 'RIGHT'
    logging.info("Starting with LEFT: %s, RIGHT: %s" %
                 (color_names[self._left_color],
                  color_names[self._right_color]))
    self.tt_gui = TwisterTempoGUI()
    self.tt_gui.set_left_color(color_names[self._left_color])
    self.tt_gui.set_right_color(color_names[self._right_color])
    self._starting_millis = time() * 1000
def get_colors_alpha(self):
    """Return the current color names keyed by foot ('LEFT' / 'RIGHT')."""
    names = TwoFeetTempoMove.COLORS_ALPHA
    return {'RIGHT': names[self._right_color],
            'LEFT': names[self._left_color]}
def get_colors_rgb(self):
    """Return the current RGB triples keyed by foot ('LEFT' / 'RIGHT')."""
    rgb = TwoFeetTempoMove.COLORS_RGB
    return {'RIGHT': rgb[self._right_color],
            'LEFT': rgb[self._left_color]}
def increase_speed(self):
    """Lower the minimum inter-beat delay by 10 so moves come more often."""
    self.min_delay -= 10
def decrease_speed(self):
    """Raise the minimum inter-beat delay by 10 so moves come less often."""
    self.min_delay += 10
def tempo_found_callback(self, seconds, millis, confidence):
    """Beat-tracker hook: act on a beat only after a randomized delay.

    Adds a random 0..max_delay jitter to the elapsed time since the last
    acted-on beat; when the jittered gap reaches min_delay the beat is
    accepted and forwarded to beat_found().
    """
    jittered_gap = millis - self._last_beat_millis + randrange(0, self.max_delay)
    if jittered_gap >= self.min_delay:
        self._last_beat_millis = millis
        self.beat_found()
def beat_found(self):
millis = self._last_beat_millis
logging.debug("Randomized beat found at: %d:%d.%d" %
(millis / 60000, millis / 1000, millis % 1000))
act_millis = time() * 1000 - self._starting_millis
logging.debug("\tActual: %d:%d.%d" %
(act_millis / 60000, act_millis / 1000, act_millis % 1000))
# special moves
if random() < TwoFeetTempoMove.FOOT_ON_AIR_PERC: # randomized next foot on air move
if self._next_foot == 'RIGHT':
self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color], on_air=True)
else:
self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color], on_air=True)
logging.debug("\tmove next foot On Air")
elif random() < TwoFeetTempoMove.FEET_ON_SAME_CIRCLE_PERC: # randomized both feet on same circle
if self._next_foot == 'RIGHT':
self._right_color = self._left_color
self.tt_gui.set_large_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
else:
self._left_color = self._right_color
self.tt_gui.set_large_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
logging.debug("\tmove both feet on same circle")
# end special moves
else:
if random() < TwoFeetTempoMove.FOOT_CHANGE_PERC: # randomize at 30% the switch on foot
if self._next_foot == 'RIGHT':
self._next_foot = 'LEFT'
else:
self._next_foot = 'RIGHT'
if self._next_foot == 'RIGHT':
if self._right_direction == "FW":
if self._right_color == len(TwoFeetTempoMove.COLORS_ALPHA) - 1:
self._right_color = self._right_color - 1
self._right_direction = "BW"
else:
self._right_color = self._right_color + 1
else:
if self._right_color == 0:
self._right_color = self._right_color + 1
self._right_direction = "FW"
else:
self._right_color = self._right_color - 1
self.tt_gui.set_right_color(TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
logging.debug("\tmove RIGHT foot to " + TwoFeetTempoMove.COLORS_ALPHA[self._right_color])
self._next_foot = 'LEFT'
else:
if self._left_direction == "FW":
if self._left_color == len(TwoFeetTempoMove.COLORS_ALPHA) - 1:
self._left_color = self._left_color - 1
self._left_direction = "BW"
else:
self._left_color = self._left_color + 1
else:
if self._left_color == 0:
self._left_color = self._left_color + 1
self._left_direction = "FW"
else:
self._left_color = self._left_color - 1
self.tt_gui.set_left_color(TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
logging.debug("\tmove LEFT foot to " + TwoFeetTempoMove.COLORS_ALPHA[self._left_color])
self._next_foot = 'RIGHT'
| 45.368852
| 106
| 0.592954
| 652
| 5,535
| 4.673313
| 0.142638
| 0.065638
| 0.089596
| 0.162783
| 0.605514
| 0.507384
| 0.409583
| 0.350837
| 0.258943
| 0.258943
| 0
| 0.026302
| 0.313098
| 5,535
| 121
| 107
| 45.743802
| 0.775118
| 0.024571
| 0
| 0.352941
| 0
| 0
| 0.051001
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 1
| 0.068627
| false
| 0
| 0.039216
| 0.019608
| 0.186275
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16ef740b41f41832481d4956834bb037ddc3b7b6
| 2,614
|
py
|
Python
|
tests/test_nested_structures_inside_structure_values.py
|
Robinson04/StructNoSQL
|
335c63593025582336bb67ad0b0ed39d30800b74
|
[
"MIT"
] | 3
|
2020-10-30T23:31:26.000Z
|
2022-03-30T21:48:40.000Z
|
tests/test_nested_structures_inside_structure_values.py
|
Robinson04/StructNoSQL
|
335c63593025582336bb67ad0b0ed39d30800b74
|
[
"MIT"
] | 42
|
2020-09-16T15:23:11.000Z
|
2021-09-20T13:00:50.000Z
|
tests/test_nested_structures_inside_structure_values.py
|
Robinson04/StructNoSQL
|
335c63593025582336bb67ad0b0ed39d30800b74
|
[
"MIT"
] | 2
|
2021-01-03T21:37:22.000Z
|
2021-08-12T20:28:52.000Z
|
import unittest
from typing import Set, Optional, Dict, List
from uuid import uuid4
from StructNoSQL import BaseField, MapModel, TableDataModel
from tests.components.playground_table_clients import PlaygroundDynamoDBBasicTable, TEST_ACCOUNT_ID
class TableModel(TableDataModel):
    """Table schema under test: a doubly-nested Dict field keyed by accountId."""
    accountId = BaseField(field_type=str, required=True)
    # key_name='itemKey' names the {{itemKey}} placeholder used in field paths.
    nestedDictDictStructure = BaseField(field_type=Dict[str, Dict[str, bool]], required=False, key_name='itemKey')
    # nestedDictListStructure = BaseField(field_type=Dict[str, List[str]], required=False)
    # nestedDictSetStructure = BaseField(field_type=Dict[str, Set[str]], required=False)
class TestsNestedStructuresInsideStructureValues(unittest.TestCase):
    """Integration tests for nested container types used as the *values* of a
    Dict field (e.g. Dict[str, Dict[str, bool]]) in a StructNoSQL table."""

    def __init__(self, method_name: str):
        super().__init__(methodName=method_name)
        # DynamoDB-backed playground table shared by all test methods.
        self.users_table = PlaygroundDynamoDBBasicTable(data_model=TableModel)

    def test_nested_dict_dict_structure(self):
        """Set, get, remove, then re-get a value two dict levels deep."""
        random_parent_key = f"parentKey_{uuid4()}"
        random_child_key = f"childKey_{uuid4()}"
        # The fields switch must expose the doubly-nested placeholder path.
        keys_fields_switch = list(self.users_table.fields_switch.keys())
        self.assertIn('nestedDictDictStructure.{{itemKey}}.{{itemKeyChild}}', keys_fields_switch)
        update_success = self.users_table.update_field(
            key_value=TEST_ACCOUNT_ID,
            field_path='nestedDictDictStructure.{{itemKey}}.{{itemKeyChild}}',
            query_kwargs={'itemKey': random_parent_key, 'itemKeyChild': random_child_key},
            value_to_set=True
        )
        self.assertTrue(update_success)
        retrieved_item = self.users_table.get_field(
            key_value=TEST_ACCOUNT_ID,
            field_path='nestedDictDictStructure.{{itemKey}}',
            query_kwargs={'itemKey': random_parent_key}
        )
        # NOTE(review): the child entry comes back under the literal key
        # 'itemKeyChild', not the random child key — confirm this is intended.
        self.assertEqual(retrieved_item, {'itemKeyChild': True})
        removed_item = self.users_table.remove_field(
            key_value=TEST_ACCOUNT_ID,
            field_path='nestedDictDictStructure.{{itemKey}}',
            query_kwargs={'itemKey': random_parent_key}
        )
        # remove_field returns the removed value.
        self.assertEqual(removed_item, {'itemKeyChild': True})
        retrieved_expected_none_item = self.users_table.get_field(
            TEST_ACCOUNT_ID,
            field_path='nestedDictDictStructure.{{itemKey}}',
            query_kwargs={'itemKey': random_parent_key}
        )
        self.assertIsNone(retrieved_expected_none_item)

    def test_nested_dict_list_structure(self):
        """Placeholder for Dict[str, List[str]] coverage."""
        # todo: implement
        pass

    def test_nested_dict_set_structure(self):
        """Placeholder for Dict[str, Set[str]] coverage."""
        # todo: implement
        pass
if __name__ == '__main__':
unittest.main()
| 38.441176
| 114
| 0.704285
| 277
| 2,614
| 6.270758
| 0.306859
| 0.031088
| 0.048359
| 0.041451
| 0.342545
| 0.264824
| 0.218768
| 0.218768
| 0.218768
| 0.218768
| 0
| 0.001429
| 0.197016
| 2,614
| 67
| 115
| 39.014925
| 0.826108
| 0.076129
| 0
| 0.229167
| 0
| 0
| 0.134855
| 0.086722
| 0
| 0
| 0
| 0.014925
| 0.104167
| 1
| 0.083333
| false
| 0.041667
| 0.104167
| 0
| 0.270833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc43defd49d4ea43585c8d3910e9622ef8bc8d38
| 1,099
|
py
|
Python
|
scrapy/spider/spider/items.py
|
huobingli/splider
|
a62f0553160531a0735b249b0dc49747e9c821f9
|
[
"MIT"
] | null | null | null |
scrapy/spider/spider/items.py
|
huobingli/splider
|
a62f0553160531a0735b249b0dc49747e9c821f9
|
[
"MIT"
] | null | null | null |
scrapy/spider/spider/items.py
|
huobingli/splider
|
a62f0553160531a0735b249b0dc49747e9c821f9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst
# class SpiderItem(scrapy.Item):
# # define the fields for your item here like:
# # name = scrapy.Field()
# pass
#
#
#
# class TorrentItem(scrapy.Item):
# url = scrapy.Field()
# name = scrapy.Field()
# description = scrapy.Field()
# size = scrapy.Field()
#
# import scrapy
class StockstarItemLoader(ItemLoader):
    # Custom ItemLoader that stores the field values scraped by the spider;
    # TakeFirst keeps only the first extracted value for each field.
    default_output_processor = TakeFirst()
class StockstarItem(scrapy.Item):  # defines the scraped stock fields
    # define the fields for your item here like:
    # name = scrapy.Field()
    code = scrapy.Field()  # stock code
    abbr = scrapy.Field()  # stock short name
    last_trade = scrapy.Field()  # latest price
    chg_ratio = scrapy.Field()  # price change percentage
    chg_amt = scrapy.Field()  # price change amount
    chg_ratio_5min = scrapy.Field()  # 5-minute change percentage
    volumn = scrapy.Field()  # trading volume
    turn_over = scrapy.Field()  # turnover (traded value)
| 24.977273
| 52
| 0.66424
| 133
| 1,099
| 5.428571
| 0.503759
| 0.213296
| 0.062327
| 0.049862
| 0.135734
| 0.135734
| 0.135734
| 0.135734
| 0.135734
| 0.135734
| 0
| 0.00348
| 0.215651
| 1,099
| 44
| 53
| 24.977273
| 0.834107
| 0.503185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.214286
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc44f25c8ff96beccbbd3fbaa05ae2dcf6790cc6
| 576
|
py
|
Python
|
fopp/Chapter 12. Functions/get_num_digits.py
|
H2u-Hwng/EVC
|
c650fe7356a333011514cf9025dfd97bf71b1de3
|
[
"MIT"
] | null | null | null |
fopp/Chapter 12. Functions/get_num_digits.py
|
H2u-Hwng/EVC
|
c650fe7356a333011514cf9025dfd97bf71b1de3
|
[
"MIT"
] | null | null | null |
fopp/Chapter 12. Functions/get_num_digits.py
|
H2u-Hwng/EVC
|
c650fe7356a333011514cf9025dfd97bf71b1de3
|
[
"MIT"
] | null | null | null |
def get_num_digits(num):
    """Return the number of decimal digits in the integer *num*.

    The sign is ignored, so get_num_digits(-123) == 3.  The original
    implementation counted len(str(num)), which included the '-' sign
    of negative numbers as an extra "digit".
    """
    # Convert the absolute value to a string and count its characters.
    return len(str(abs(num)))
def main():
    """Prompt the user for an integer and report how many digits it has."""
    value = int(input('Enter an integer: '))
    digit_count = get_num_digits(value)
    print(f'The number of digits in number {value} is {digit_count}.')
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
| 20.571429
| 70
| 0.647569
| 80
| 576
| 4.5625
| 0.4375
| 0.087671
| 0.153425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.276042
| 576
| 27
| 71
| 21.333333
| 0.8753
| 0.402778
| 0
| 0
| 0
| 0
| 0.221557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0
| 0
| 0.333333
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc450f5f688b95fda7b269a4ca568c7ecc5143ca
| 4,992
|
py
|
Python
|
whois/__init__.py
|
mzpqnxow/whois-1
|
b5623ed25cfa58d9457d30dae640e69b9e530b23
|
[
"MIT"
] | null | null | null |
whois/__init__.py
|
mzpqnxow/whois-1
|
b5623ed25cfa58d9457d30dae640e69b9e530b23
|
[
"MIT"
] | null | null | null |
whois/__init__.py
|
mzpqnxow/whois-1
|
b5623ed25cfa58d9457d30dae640e69b9e530b23
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import *
import re
import sys
import os
import subprocess
import socket
from .parser import WhoisEntry
from .whois import NICClient
# thanks to https://www.regextester.com/104038
IPV4_OR_V6 = re.compile(r"((^\s*((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))\s*$)|(^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$))")
def whois(url, command=False, flags=0):
    """Perform a WHOIS lookup for a domain, URL or IP address.

    :param url: domain name, URL, or IPv4/IPv6 address
    :param command: if True, shell out to the native ``whois`` binary
        instead of using the builtin NICClient
    :param flags: flags forwarded to NICClient.whois_lookup
    :return: the parsed WhoisEntry
    """
    # clean domain to expose netloc
    ip_match = IPV4_OR_V6.match(url)
    if ip_match:
        domain = url
        try:
            # Reverse-resolve the IP; on failure the raw IP is used as-is.
            result = socket.gethostbyaddr(url)
        except socket.herror as e:
            pass
        else:
            domain = extract_domain(result[0])
    else:
        domain = extract_domain(url)
    if command:
        # try native whois command
        r = subprocess.Popen(['whois', domain], stdout=subprocess.PIPE)
        text = r.stdout.read().decode()
    else:
        # try builtin client; the domain is IDNA-encoded for the wire.
        nic_client = NICClient()
        text = nic_client.whois_lookup(None, domain.encode('idna'), flags)
    return WhoisEntry.load(domain, text)
suffixes = None
def extract_domain(url):
    """Extract the domain from the given URL
    >>> print(extract_domain('http://www.google.com.au/tos.html'))
    google.com.au
    >>> print(extract_domain('abc.def.com'))
    def.com
    >>> print(extract_domain(u'www.公司.hk'))
    公司.hk
    >>> print(extract_domain('chambagri.fr'))
    chambagri.fr
    >>> print(extract_domain('www.webscraping.com'))
    webscraping.com
    >>> print(extract_domain('198.252.206.140'))
    stackoverflow.com
    >>> print(extract_domain('102.112.2O7.net'))
    2o7.net
    >>> print(extract_domain('globoesporte.globo.com'))
    globo.com
    >>> print(extract_domain('1-0-1-1-1-0-1-1-1-1-1-1-1-.0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info'))
    0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info
    >>> print(extract_domain('2607:f8b0:4006:802::200e'))
    1e100.net
    >>> print(extract_domain('172.217.3.110'))
    1e100.net
    """
    if IPV4_OR_V6.match(url):
        # this is an IP address: the domain is its reverse-DNS hostname
        return socket.gethostbyaddr(url)[0]

    # load known TLD suffixes (lazily, once per process, cached in a global)
    global suffixes
    if not suffixes:
        # downloaded from https://publicsuffix.org/list/public_suffix_list.dat
        tlds_path = os.path.join(os.getcwd(), os.path.dirname(__file__), 'data', 'public_suffix_list.dat')
        with open(tlds_path, encoding='utf-8') as tlds_fp:
            # Suffixes are stored as UTF-8 bytes; comparisons below use bytes.
            suffixes = set(line.encode('utf-8') for line in tlds_fp.read().splitlines() if line and not line.startswith('//'))

    if not isinstance(url, str):
        url = url.decode('utf-8')
    # Strip the scheme and any path; keep only the lowercased host part.
    url = re.sub('^.*://', '', url)
    url = url.split('/')[0].lower()

    # find the longest suffix match
    domain = b''
    split_url = url.split('.')
    # Grow the candidate right-to-left while it is still a known public
    # suffix; the loop stops one label past the longest matching suffix.
    for section in reversed(split_url):
        if domain:
            domain = b'.' + domain
        domain = section.encode('utf-8') + domain
        if domain not in suffixes:
            if not b'.' in domain and len(split_url) >= 2:
                # If this is the first section and there wasn't a match, try to
                # match the first two sections - if that works, keep going
                # See https://github.com/richardpenman/whois/issues/50
                second_order_tld = '.'.join([split_url[-2], split_url[-1]])
                if not second_order_tld.encode('utf-8') in suffixes:
                    break
            else:
                break
    return domain.decode('utf-8')
if __name__ == '__main__':
try:
url = sys.argv[1]
except IndexError:
print('Usage: %s url' % sys.argv[0])
else:
print(whois(url))
| 42.305085
| 1,227
| 0.55629
| 927
| 4,992
| 2.9137
| 0.211435
| 0.035542
| 0.048871
| 0.059237
| 0.197705
| 0.183636
| 0.178082
| 0.155498
| 0.138467
| 0.138467
| 0
| 0.109804
| 0.182692
| 4,992
| 117
| 1,228
| 42.666667
| 0.552206
| 0.252404
| 0
| 0.132353
| 0
| 0.014706
| 0.358366
| 0.337107
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0.014706
| 0.191176
| 0
| 0.264706
| 0.044118
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc45c15aebfb0da618b90f3884eb8a545e0f2823
| 3,255
|
py
|
Python
|
app/dialog/avatar_picture_dialog.py
|
tirinox/alphavatarbot
|
5adac8c9c4534206eaf6c146f6e194ed5951d055
|
[
"MIT"
] | 1
|
2021-03-18T15:35:15.000Z
|
2021-03-18T15:35:15.000Z
|
app/dialog/avatar_picture_dialog.py
|
tirinox/alphavatarbot
|
5adac8c9c4534206eaf6c146f6e194ed5951d055
|
[
"MIT"
] | null | null | null |
app/dialog/avatar_picture_dialog.py
|
tirinox/alphavatarbot
|
5adac8c9c4534206eaf6c146f6e194ed5951d055
|
[
"MIT"
] | 1
|
2021-03-18T15:35:51.000Z
|
2021-03-18T15:35:51.000Z
|
import asyncio
from contextlib import AsyncExitStack
from aiogram.dispatcher.filters.state import StatesGroup, State
from aiogram.dispatcher.storage import FSMContextProxy
from aiogram.types import Message, PhotoSize, ReplyKeyboardRemove, ContentTypes
from aiogram.utils.helper import HelperMode
from dialog.avatar_image_work import download_tg_photo, get_userpic, combine_frame_and_photo_v2, img_to_bio
from dialog.base import BaseDialog, message_handler
from localization import BaseLocalization
from lib.depcont import DepContainer
from lib.texts import kbd
# todo: accept documents!
class AvatarStates(StatesGroup):
    """FSM states for the avatar-building dialog."""
    mode = HelperMode.snake_case  # fixme: no state handle
    MAIN = State()
class AvatarDialog(BaseDialog):
    """Dialog that builds a framed avatar image from the user's Telegram
    userpic, or from a photo the user sends directly."""

    def __init__(self, loc: BaseLocalization, data: FSMContextProxy, d: DepContainer):
        super().__init__(loc, data, d)
        # Serializes image processing so concurrent requests don't interleave.
        self._work_lock = asyncio.Lock()

    def menu_kbd(self):
        """Main menu keyboard: a single 'from my userpic' button."""
        return kbd([
            self.loc.BUTTON_AVA_FROM_MY_USERPIC,
        ], vert=True)

    @message_handler(state=None)
    async def on_no_state(self, message: Message):
        await self.on_enter(message)

    @message_handler(state=AvatarStates.MAIN)
    async def on_enter(self, message: Message):
        if message.text == self.loc.BUTTON_AVA_FROM_MY_USERPIC:
            await self.handle_avatar_picture(message, self.loc)
        else:
            await AvatarStates.MAIN.set()
            await message.answer(self.loc.TEXT_AVA_WELCOME, reply_markup=self.menu_kbd())

    @message_handler(state=AvatarStates.MAIN, content_types=ContentTypes.PHOTO)
    async def on_picture(self, message: Message):
        # NOTE(review): photo[0] is the first (smallest) PhotoSize Telegram
        # provides; photo[-1] would be the largest — confirm intent.
        await self.handle_avatar_picture(message, self.loc, explicit_picture=message.photo[0])

    async def handle_avatar_picture(self, message: Message, loc: BaseLocalization, explicit_picture: PhotoSize = None):
        """Download the source picture, validate its size, composite it with
        the frame, and send the result back as a document."""
        async with AsyncExitStack() as stack:
            # BUG FIX: enter_async_context returns a coroutine and must be
            # awaited — without the await the lock was never acquired.
            await stack.enter_async_context(self._work_lock)

            # POST A LOADING STICKER
            sticker = await message.answer_sticker(self.loc.LOADING_STICKER,
                                                   disable_notification=True,
                                                   reply_markup=ReplyKeyboardRemove())
            # CLEAN UP IN THE END
            stack.push_async_callback(sticker.delete)

            if explicit_picture is not None:
                user_pic = await download_tg_photo(explicit_picture)
            else:
                user_pic = await get_userpic(message.from_user)

            w, h = user_pic.size
            if not w or not h:
                await message.answer(loc.TEXT_AVA_ERR_INVALID, reply_markup=self.menu_kbd())
                return
            # Reject pictures outside the supported 64..4096 px range.
            if not ((64 <= w <= 4096) and (64 <= h <= 4096)):
                await message.answer(loc.TEXT_AVA_ERR_SIZE, reply_markup=self.menu_kbd())
                return

            # pic = await combine_frame_and_photo(self.deps.cfg, user_pic)
            pic = await combine_frame_and_photo_v2(self.deps.cfg, user_pic)

            user_id = message.from_user.id
            pic = img_to_bio(pic, name=f'alpha_avatar_{user_id}.png')
            await message.answer_document(pic, caption=loc.TEXT_AVA_READY, reply_markup=self.menu_kbd())
| 39.695122
| 119
| 0.677112
| 404
| 3,255
| 5.200495
| 0.324257
| 0.023322
| 0.042837
| 0.036173
| 0.254641
| 0.150405
| 0.097097
| 0.039981
| 0
| 0
| 0
| 0.006093
| 0.243625
| 3,255
| 81
| 120
| 40.185185
| 0.847279
| 0.046083
| 0
| 0.070175
| 0
| 0
| 0.00839
| 0.00839
| 0
| 0
| 0
| 0.012346
| 0
| 1
| 0.035088
| false
| 0
| 0.192982
| 0.017544
| 0.350877
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc4baf04ed5a5ebda75a1b19ad254a0f725f6190
| 2,027
|
py
|
Python
|
nehebn2.py
|
psifertex/nehebn2
|
8b62a88a9d06624dbb62b8b74cc0566172fba970
|
[
"MIT"
] | null | null | null |
nehebn2.py
|
psifertex/nehebn2
|
8b62a88a9d06624dbb62b8b74cc0566172fba970
|
[
"MIT"
] | null | null | null |
nehebn2.py
|
psifertex/nehebn2
|
8b62a88a9d06624dbb62b8b74cc0566172fba970
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from components import ProgramState
import binaryninja as binja
import argparse
import os.path
import curses
# TODO...impliment live-refreashing the settings.json during run (add the keybinding and check for it here in the global input loop)
# TODO...support multi-key presses? Not sure if this already works or not
# TODO...make sure to support small terminals (I think it does right now, but I should add some more checks so nothing goes out of bounds)
def main(stdscr):
    """Curses entry point: optionally load a binary with Binary Ninja while
    drawing an analysis progress bar, then run the input/render loop."""
    # Setup
    parser = argparse.ArgumentParser(description='Nearly Headless BinaryNinja.')
    parser.add_argument('filename', nargs='?', default="")
    args = parser.parse_args()
    program = ''
    if not args.filename == "":
        if os.path.isfile(args.filename):
            bv = binja.BinaryViewType.get_view_of_file(''.join(args.filename), False)
            bv.update_analysis()
            # Poll analysis progress and draw a 34-cell bar until it is Idle.
            while not str(bv.analysis_progress) == "Idle":
                prog = bv.analysis_progress
                stdscr.erase()
                stdscr.border()
                state = ''
                if prog.state == binja.AnalysisState.DisassembleState:
                    state = "Disassembling"
                else:
                    state = "Analyzing"
                loadingText = "Loading File: "
                # Scale completed/total into 34 bar cells; +1 avoids /0.
                prog = int((prog.count/(prog.total+1))*34.0)
                stdscr.addstr(2, 4, loadingText)
                stdscr.addstr(2, 4 + len(loadingText), state)
                stdscr.addstr(4, 4, '[' + '#'*prog + ' '*(34-prog) + ']')
                stdscr.refresh()
            program = ProgramState(stdscr, bv)
        else:
            raise IOError("File does not exist.")
    else:
        # No filename given: start with an empty program state.
        program = ProgramState(stdscr)
    key = ""
    while program.is_running:
        # Input Filtering
        try:
            key = stdscr.getkey()
        except curses.error as err:
            # getkey raises "no input" in non-blocking mode; treat as no key.
            if not str(err) == "no input":
                raise curses.error(str(err))
            else:
                key = ""  # Clear Key Buffer
        # Rendering and input
        program.parseInput(key)
        program.render()
        curses.doupdate()
if __name__ == "__main__":
background = "2a2a2a"
text = "e0e0e0"
curses.wrapper(main)
| 28.957143
| 138
| 0.644795
| 253
| 2,027
| 5.098814
| 0.561265
| 0.027907
| 0.027907
| 0.021705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01225
| 0.23483
| 2,027
| 69
| 139
| 29.376812
| 0.819471
| 0.207203
| 0
| 0.12
| 0
| 0
| 0.080675
| 0
| 0
| 0
| 0
| 0.014493
| 0
| 1
| 0.02
| false
| 0
| 0.1
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc511f5404ed81ec6c064f4f97b303375361769d
| 774
|
py
|
Python
|
leetcode/47.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | 1
|
2019-08-28T23:15:25.000Z
|
2019-08-28T23:15:25.000Z
|
leetcode/47.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | null | null | null |
leetcode/47.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | null | null | null |
"""
link: https://leetcode.com/problems/permutations-ii
problem: 求全排列,nums中存在重复数
solution: 同46,加上排序即可
"""
class Solution:
    def permuteUnique(self, nums: "List[int]") -> "List[List[int]]":
        """Return all distinct permutations of nums, which may contain
        duplicate values.

        Recursive approach: sort a copy, then for each *distinct* value swap
        it to the front and recurse on the remainder; skipping a value equal
        to its successor avoids emitting duplicate permutations.

        Fix: the annotations are quoted (forward references) because the file
        never imports ``List`` from ``typing`` — the original raised a
        NameError at definition time outside the LeetCode harness.
        """
        if len(nums) == 1:
            return [nums]
        new_nums = sorted(nums)
        res = []
        for i in range(len(new_nums)):
            # Equal to its successor: that branch covers the same permutations.
            if i + 1 < len(new_nums) and new_nums[i] == new_nums[i + 1]:
                continue
            new_nums[i], new_nums[0] = new_nums[0], new_nums[i]
            for tail in self.permuteUnique(new_nums[1:]):
                # '+' already builds a fresh list, so no extra copy is needed.
                res.append([new_nums[0]] + tail)
            # Restore the swap so the next iteration sees the sorted order.
            new_nums[i], new_nums[0] = new_nums[0], new_nums[i]
        return res
| 28.666667
| 73
| 0.529716
| 106
| 774
| 3.698113
| 0.367925
| 0.285714
| 0.122449
| 0.112245
| 0.201531
| 0.163265
| 0.163265
| 0.163265
| 0.163265
| 0.163265
| 0
| 0.023166
| 0.330749
| 774
| 26
| 74
| 29.769231
| 0.733591
| 0.127907
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc6e15840fb47699b6ed6ae5254ac356715fcfad
| 2,794
|
py
|
Python
|
tests/gen_test.py
|
tinylambda/tornadio2
|
7b112e2e207bd7500288b42896f9970c16e623ad
|
[
"Apache-2.0"
] | null | null | null |
tests/gen_test.py
|
tinylambda/tornadio2
|
7b112e2e207bd7500288b42896f9970c16e623ad
|
[
"Apache-2.0"
] | null | null | null |
tests/gen_test.py
|
tinylambda/tornadio2
|
7b112e2e207bd7500288b42896f9970c16e623ad
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
tornadio2.tests.gen
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by the Serge S. Koval, see AUTHORS for more details.
:license: Apache, see LICENSE for more details.
"""
from collections import deque
from nose.tools import eq_
from tornadio2 import gen
_queue = None
def init_environment():
    """Reset the module-level callback queue to a fresh, empty deque."""
    global _queue
    _queue = deque()
def run_sync(test, callback):
    """Synchronous strategy: invoke the callback immediately with the value."""
    callback(test)
def queue_async(test, callback):
    """Deferred strategy: enqueue (callback, value) for later execution."""
    global _queue
    _queue.append((callback, test))
def step_async():
    """Pop the oldest queued (callback, value) pair and invoke it.

    Raises IndexError when the queue is empty."""
    cb, value = _queue.popleft()
    cb(value)
def run_async():
    """Drain the queue in FIFO order until it is exhausted."""
    try:
        while True:
            step_async()
    except IndexError:
        # Queue is empty — nothing left to run.
        pass
def run_async_oor():
    """Drain the queue in LIFO (out-of-order) fashion until exhausted."""
    while _queue:
        cb, value = _queue.pop()
        cb(value)
class Dummy():
    """Runs one gen.Task through the given scheduling strategy and stores the
    produced value in ``self.v``."""
    def __init__(self, queue_type):
        self.v = None  # last value produced by test()
        self.queue_type = queue_type  # scheduling strategy (run_sync / queue_async)

    @gen.sync_engine
    def test(self, value):
        self.v = yield gen.Task(self.queue_type, value)
class DummyList():
    """Appends each task result to ``self.v``; uses sync_engine, so results
    arrive in call order."""
    def __init__(self, queue_type):
        self.v = []  # values produced by successive test() calls
        self.queue_type = queue_type  # scheduling strategy

    @gen.sync_engine
    def test(self, value):
        self.v.append((yield gen.Task(self.queue_type, value)))
class DummyListOutOfOrder():
    """Like DummyList but decorated with gen.engine, which permits the
    callbacks to complete out of order."""
    def __init__(self, queue_type):
        self.v = []  # values in the order their callbacks completed
        self.queue_type = queue_type  # scheduling strategy

    @gen.engine
    def test(self, value):
        self.v.append((yield gen.Task(self.queue_type, value)))
class DummyLoop():
    """Accumulates two sequential task results into ``self.v``."""
    def __init__(self, queue_type):
        self.v = 0  # running sum of produced values
        self.queue_type = queue_type  # scheduling strategy

    @gen.sync_engine
    def test(self, value):
        # Two tasks in a row; sync_engine resumes the loop after each yield.
        for n in range(2):
            self.v += (yield gen.Task(self.queue_type, value))
def test():
    """Synchronous strategy: the value is delivered immediately."""
    init_environment()
    d = Dummy(run_sync)
    d.test('test')
    eq_(d.v, 'test')
def test_async():
    """Deferred strategy: the value appears only after the queue is drained."""
    init_environment()
    d = Dummy(queue_async)
    d.test('test')
    run_async()
    # Verify value
    eq_(d.v, 'test')
def test_sync_queue():
    """FIFO draining with sync_engine preserves submission order."""
    init_environment()
    d = DummyList(queue_async)
    for item in ('1', '2', '3'):
        d.test(item)
    run_async()
    # Verify value
    eq_(d.v, ['1', '2', '3'])
def test_sync_queue_oor():
    """LIFO draining still yields submission order under sync_engine."""
    init_environment()
    d = DummyList(queue_async)
    for item in ('1', '2', '3'):
        d.test(item)
    run_async_oor()
    # Verify value
    eq_(d.v, ['1', '2', '3'])
def test_async_queue_oor():
    """LIFO draining with gen.engine reverses the completion order."""
    init_environment()
    d = DummyListOutOfOrder(queue_async)
    for item in ('1', '2', '3'):
        d.test(item)
    run_async_oor()
    # Verify value
    eq_(d.v, ['3', '2', '1'])
| 17.910256
| 77
| 0.598067
| 362
| 2,794
| 4.38674
| 0.19337
| 0.09068
| 0.098237
| 0.040302
| 0.561083
| 0.509446
| 0.490554
| 0.442065
| 0.438917
| 0.397985
| 0
| 0.015093
| 0.264853
| 2,794
| 155
| 78
| 18.025806
| 0.758033
| 0.084825
| 0
| 0.62766
| 0
| 0
| 0.013444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.202128
| false
| 0
| 0.031915
| 0
| 0.276596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc7c42367a8432fba7810ae50ee93f6f9fc12d32
| 2,516
|
py
|
Python
|
unittests/tools/test_intsights_parser.py
|
M-Rod101/django-DefectDojo
|
7b09a00b1a526abaf40455c2ddec16aaa06b16e2
|
[
"BSD-3-Clause"
] | 249
|
2016-09-06T21:04:40.000Z
|
2018-01-19T15:59:44.000Z
|
unittests/tools/test_intsights_parser.py
|
OWASP/django-DefectDojo
|
c101e47b294863877cd68a82d0cc60f8017b45b1
|
[
"BSD-3-Clause"
] | 255
|
2016-09-06T21:36:37.000Z
|
2018-01-19T19:57:57.000Z
|
unittests/tools/test_intsights_parser.py
|
M-Rod101/django-DefectDojo
|
7b09a00b1a526abaf40455c2ddec16aaa06b16e2
|
[
"BSD-3-Clause"
] | 152
|
2016-09-06T21:04:54.000Z
|
2018-01-18T08:52:24.000Z
|
from ..dojo_test_case import DojoTestCase
from dojo.models import Test
from dojo.tools.intsights.parser import IntSightsParser
class TestIntSightsParser(DojoTestCase):
    """Unit tests for IntSightsParser over JSON and CSV sample reports.

    Fixes over the original: files are opened via context managers so they
    are always closed (the invalid-file test leaked its handle when the
    expected ValueError fired before close), and the deprecated
    ``assertEquals`` alias is replaced with ``assertEqual``.
    """

    def test_intsights_parser_with_one_critical_vuln_has_one_findings_json(
            self):
        with open("unittests/scans/intsights/intsights_one_vul.json") as testfile:
            parser = IntSightsParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(1, len(findings))
        finding = list(findings)[0]
        self.assertEqual(
            '5c80dbf83b4a3900078b6be6',
            finding.unique_id_from_tool)
        self.assertEqual(
            'HTTP headers weakness in initech.com web server',
            finding.title)
        self.assertEqual('Critical', finding.severity)
        self.assertEqual(
            "https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6",
            finding.references)

    def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv(
            self):
        with open("unittests/scans/intsights/intsights_one_vuln.csv") as testfile:
            parser = IntSightsParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(1, len(findings))
        finding = list(findings)[0]
        self.assertEqual(
            "mn7xy83finmmth4ja363rci9",
            finding.unique_id_from_tool)
        self.assertEqual(
            "HTTP headers weakness in company-domain.com web server",
            finding.title)

    def test_intsights_parser_with_many_vuln_has_many_findings_json(self):
        with open("unittests/scans/intsights/intsights_many_vul.json") as testfile:
            findings = IntSightsParser().get_findings(testfile, Test())
        self.assertEqual(3, len(findings))

    def test_intsights_parser_with_many_vuln_has_many_findings_csv(self):
        with open("unittests/scans/intsights/intsights_many_vuln.csv") as testfile:
            findings = IntSightsParser().get_findings(testfile, Test())
        self.assertEqual(9, len(findings))

    def test_intsights_parser_invalid_text_with_error_csv(self):
        # The ValueError must come from parsing, not from opening the file.
        with self.assertRaises(ValueError):
            with open("unittests/scans/intsights/intsights_invalid_file.txt") as testfile:
                IntSightsParser().get_findings(testfile, Test())
| 38.121212
| 102
| 0.677663
| 265
| 2,516
| 6.177358
| 0.267925
| 0.073305
| 0.04887
| 0.067196
| 0.736714
| 0.707392
| 0.653635
| 0.653635
| 0.609652
| 0.483812
| 0
| 0.022786
| 0.232512
| 2,516
| 65
| 103
| 38.707692
| 0.824961
| 0
| 0
| 0.518519
| 0
| 0
| 0.194754
| 0.116852
| 0
| 0
| 0
| 0
| 0.203704
| 1
| 0.092593
| false
| 0
| 0.055556
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc831b7e95388ec378c7efd07e50c5540c59f285
| 435
|
py
|
Python
|
gullveig/web/__init__.py
|
Addvilz/gullveig
|
6ac5e66062c1b5ea8ad7c66f69be9e3d99ac0825
|
[
"Apache-2.0"
] | 8
|
2020-08-24T14:53:14.000Z
|
2021-03-16T03:58:01.000Z
|
gullveig/web/__init__.py
|
Addvilz/gullveig
|
6ac5e66062c1b5ea8ad7c66f69be9e3d99ac0825
|
[
"Apache-2.0"
] | 6
|
2020-08-25T13:19:02.000Z
|
2021-02-21T21:55:34.000Z
|
gullveig/web/__init__.py
|
Addvilz/gullveig
|
6ac5e66062c1b5ea8ad7c66f69be9e3d99ac0825
|
[
"Apache-2.0"
] | null | null | null |
import logging
from gullveig import bootstrap_default_logger
# Configure default logging
def _configure_default_web_logger():
    """Install the default logging configuration on every logger used by the
    web component (web, api, and the aiohttp server)."""
    for logger_name in ('gullveig-web', 'gullveig-api', 'aiohttp.server'):
        bootstrap_default_logger(logging.getLogger(logger_name))
_configure_default_web_logger()
| 22.894737
| 52
| 0.795402
| 51
| 435
| 6.392157
| 0.294118
| 0.196319
| 0.269939
| 0.153374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128736
| 435
| 18
| 53
| 24.166667
| 0.860158
| 0.057471
| 0
| 0
| 0
| 0
| 0.093137
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.2
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc926bb3d7c2f20a37f4cae0b86f7455ebdb913c
| 1,430
|
py
|
Python
|
scalability/tests/test_misc.py
|
ggreif/ic
|
ac56ec91f077c00d59eea3f73f51e14a1b3ea882
|
[
"Apache-2.0"
] | 941
|
2021-05-10T08:14:14.000Z
|
2022-03-31T11:40:24.000Z
|
scalability/tests/test_misc.py
|
ggreif/ic
|
ac56ec91f077c00d59eea3f73f51e14a1b3ea882
|
[
"Apache-2.0"
] | 3
|
2022-02-16T12:24:20.000Z
|
2022-03-23T12:05:41.000Z
|
scalability/tests/test_misc.py
|
ggreif/ic
|
ac56ec91f077c00d59eea3f73f51e14a1b3ea882
|
[
"Apache-2.0"
] | 122
|
2021-05-10T08:21:23.000Z
|
2022-03-25T20:34:12.000Z
|
import unittest
from unittest import TestCase
from misc import verify
class TestVerify(TestCase):
    """Tests for the misc.py verify() function (returns 0 on pass, 1 on fail)."""

    def test_verify__with_zero_threshold_and_expected_succeeds(self):
        """Test passes when expected rate, actual rate and threshold are all zero."""
        result = verify(metric="Query failure rate", actual=0.0, expected=0.0, threshold=0.0)
        self.assertEqual(result, 0)

    def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self):
        """Test fails when positive delta between actual rate and expected rate exceeds positive threshold."""
        result = verify(metric="Update latency", actual=200, expected=100, threshold=0.1)
        self.assertEqual(result, 1)

    def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self):
        """Test fails when negative delta between actual rate and expected rate exceeds negative threshold."""
        result = verify(metric="Update latency", actual=50, expected=100, threshold=-0.01)
        self.assertEqual(result, 1)

    def test_verify__fails_when_negative_delta_and_positive_threshold(self):
        """Test passes when the delta is negative (actual below expected) but the
        threshold is positive — only same-direction excess counts as failure.
        (Original docstring incorrectly said the test fails; it asserts 0.)"""
        result = verify(metric="Update latency", actual=50, expected=100, threshold=0.01)
        self.assertEqual(result, 0)
# Allow running this test module directly: python test_misc.py
if __name__ == "__main__":
    unittest.main()
| 43.333333
| 110
| 0.735664
| 189
| 1,430
| 5.31746
| 0.285714
| 0.053731
| 0.051741
| 0.053731
| 0.552239
| 0.452736
| 0.452736
| 0.406965
| 0.275622
| 0.275622
| 0
| 0.02874
| 0.172727
| 1,430
| 32
| 111
| 44.6875
| 0.820795
| 0.263636
| 0
| 0.222222
| 0
| 0
| 0.066212
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.222222
| false
| 0
| 0.166667
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bc9f42407dc824808c93da43b669882c77d6d9f4
| 9,461
|
py
|
Python
|
web/app/forms.py
|
Devidence7/Break
|
f961b1b46977c86739ff651fe81a1d9fff98a8e1
|
[
"MIT"
] | null | null | null |
web/app/forms.py
|
Devidence7/Break
|
f961b1b46977c86739ff651fe81a1d9fff98a8e1
|
[
"MIT"
] | null | null | null |
web/app/forms.py
|
Devidence7/Break
|
f961b1b46977c86739ff651fe81a1d9fff98a8e1
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import Form, StringField, PasswordField, BooleanField, SubmitField, IntegerField, validators, FileField, \
MultipleFileField, SelectField, RadioField, HiddenField, DecimalField, TextAreaField
from wtforms.fields.html5 import DateField
from wtforms.validators import DataRequired
# Structure of the Login form
class LoginForm(Form):
    """Login form: e-mail and password, with a remember-me flag."""
    email = StringField('Email', [
        validators.DataRequired(message='Es necesario introducir un email')])
    password = PasswordField('Contraseña', [
        validators.DataRequired(message='Es necesario introducir una contraseña')])
    remember_me = BooleanField('Recuerdame')
    submit = SubmitField('Iniciar Sesión')
# Structure of the Register form
class RegisterForm(Form):
    """Sign-up form: name, surname, e-mail, and a twice-entered password."""
    name = StringField('Nombre', [
        validators.DataRequired(message='Es necesario introducir un nombre'),
        validators.Length(min=4, max=50, message='El tamaño máximo del nombre son 50 carácteres')])
    lastname = StringField('Apellidos', [
        validators.DataRequired(message='Es necesario introducir apellidos'),
        validators.Length(min=4, max=50, message='El tamaño máximo del nombre son 50 carácteres')])
    # username = StringField('Username', [
    #     validators.Length(min=4, max=25, message='El nombre de usuario debe tener entre 4 y 25 carácteres')])
    email = StringField('Email', [
        validators.DataRequired(message='Es necesario introducir un email'),
        validators.Length(min=1, max=50, message='El email no puede contener más de 50 carácteres')])
    password = PasswordField('Contraseña', [
        validators.DataRequired(message='Es necesario una contraseña'),
        validators.Length(min=8, message='La contraseña debe tener al menos 8 caracteres')
    ])
    # Must match `password` exactly.
    confirm = PasswordField('Confirmar Contraseña', [
        validators.EqualTo('password', message='Las contraseñas no coinciden')
    ])
# Structure of the password-recovery form (previous comment wrongly said "Login form")
class RestorePasswordForm(Form):
    """Request a password-recovery e-mail for the given address."""
    email = StringField('Email', [
        validators.DataRequired(message='Es necesario introducir un email')])
    submit = SubmitField("Correo de Recuperación")
class EditProfile(FlaskForm):
    """Profile edit form: name, surname and gender."""
    name = StringField('Nombre', [
        validators.DataRequired(message='Es necesario introducir un nombre'),
        validators.Length(min=4, max=50, message='El tamaño máximo del nombre son 50 carácteres')])
    lastname = StringField('Apellidos', [
        validators.DataRequired(message='Es necesario introducir apellidos'),
        validators.Length(min=4, max=50, message='El tamaño máximo del nombre son 50 carácteres')])
    gender = RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')])
    submit = SubmitField('Guardar cambios')
class EditLocation(FlaskForm):
    """Location edit form: latitude/longitude carried in hidden fields.

    The hidden fields are presumably filled in client-side (e.g. by a map
    widget in the template) — confirm against the corresponding view/template.
    """
    lat = HiddenField('Latitud', [
        validators.DataRequired(message='No se ha podido obtener la nueva localización')
    ])
    lng = HiddenField('Longitud', [
        validators.DataRequired(message='No se ha podido obtener la nueva localización')
    ])
    submit = SubmitField('Establecer ubicación')
class EditPassword(FlaskForm):
    """Password change form: current password plus the new one entered twice."""
    old = PasswordField('Contraseña Anterior', [
        validators.DataRequired(message='Es necesario introducir una contraseña')
    ])
    password = PasswordField('Eliga una contraseña', [
        validators.DataRequired(message='Es necesario introducir una contraseña'),
        validators.Length(min=8, message='La contraseña debe tener al menos 8 caracteres')
    ])
    # Must match `password` exactly.
    confirm = PasswordField('Confirme la contraseña', [
        validators.EqualTo('password', message='Las contraseñas no coinciden')
    ])
    submit = SubmitField('Cambiar contraseña')
class EditEmail(FlaskForm):
    """E-mail change form: the new address entered twice."""
    email = StringField('Correo electrónico', [
        validators.DataRequired(message='Es necesario introducir una dirección de correo'),
        validators.Length(min=1, max=50, message='El correo no puede contener más de 50 carácteres')])
    # Must match `email` exactly.
    confirm = StringField('Confirmar correo electrónico', [
        validators.EqualTo('email', message='Los correos no coinciden')
    ])
    submit = SubmitField('Cambiar correo')
class EditPicture(FlaskForm):
    """Profile picture form: upload a new image or delete the current one."""
    picture = FileField('Imagen de perfil')
    submit = SubmitField('Establecer imagen')
    delete = SubmitField('Eliminar imagen')
class DeleteAccount(FlaskForm):
    """Single-button form confirming account deletion."""
    delete = SubmitField("Eliminar cuenta")
# Structure of the Subir Anuncio (publish listing) form
class SubirAnuncioForm(FlaskForm):
    """Listing publication form: product name, price, category, description,
    optional coordinates and an optional end date."""
    # pictures = HiddenField("Imágenes")
    # mimes = HiddenField("Formatos de imagen")
    name = StringField('Nombre del producto', [
        validators.DataRequired(message='Es necesario introducir un nombre de producto'),
        validators.Length(min=1, max=50, message='El tamaño máximo del nombre del producto son 50 carácteres')])
    price = DecimalField('Precio (€)', [
        validators.DataRequired(message='Es necesario introducir un precio'),
        validators.NumberRange(min=0, max=1000000, message='El precio intoducido no es válido (de 0 € a 999.999,99 €)')])
    category = SelectField('Categoría',
        choices = [
            ('Automoción', 'Automoción'),
            ('Informática', 'Informática'),
            ('Moda', 'Moda'),
            ('Deporte y ocio', 'Deporte y ocio'),
            ('Videojuegos', 'Videojuegos'),
            ('Libros y música', 'Libros y música'),
            ('Hogar y jardín', 'Hogar y jardín'),
            ('Foto y audio', 'Foto y audio')
        ], validators = [
            validators.DataRequired(message='Es necesario seleccionar una categoría') ])
    description = TextAreaField('Descripción', [
        validators.DataRequired(message='Es necesario escribir una descripción')])
    # Hidden coordinates — presumably filled client-side; no validators, so optional.
    lat = HiddenField('Latitud')
    lng = HiddenField('Longitud')
    # Optional end date, ISO format.
    enddate = DateField('End', format = '%Y-%m-%d', description = 'Time that the event will occur',
        validators= [validators.Optional()] )
    submit = SubmitField('Publicar')
class ProductSearch(Form):
    """Product search/filter form.

    The option lists (categories, states, page sizes, orderings) are class
    attributes, and the corresponding SelectFields are built from them so each
    list is defined exactly once. Previously every list was duplicated inline
    in its field's ``choices``, which invited the two copies drifting apart.
    Field definition order is unchanged, so rendering order is preserved.
    """
    # Canonical option lists, reusable by other modules.
    categories = ['Automoción', 'Informática', 'Moda', 'Deporte y ocio', 'Videojuegos', 'Libros y música', 'Hogar y jardín', 'Foto y audio']
    estados = [('en venta', 'En Venta'), ('vendido', 'Vendido')]
    resultadosporpag = ['15', '30', '45', '60', '75', '90']
    ordenacionlist = [('published ASC', 'Fecha (Más viejos primero)'), ('published DESC', 'Fecha (Más nuevos primero)'), ('distance DESC', 'Distancia Descendente'), ('distance ASC', 'Distancia Ascendente'), ('price ASC', 'Precio Ascendente'), ('price DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente')]

    # (value, label) pairs are identical for categories and page sizes.
    category = SelectField('Categoría', choices=[(c, c) for c in categories])
    status = SelectField('Estado', choices=estados)
    keywords = StringField('Palabras Clave')
    minprice = StringField('Precio Mínimo')
    maxprice = StringField('Precio Máximo')
    minpublished = DateField('Start', format = '%Y-%m-%d', description = 'Time that the event will occur')
    maxpublished = DateField('Start', format = '%Y-%m-%d', description = 'Time that the event will occur')
    resultados = SelectField('Resultados Por Página', choices=[(r, r) for r in resultadosporpag])
    ordenacion = SelectField('Ordenación de Resultados', choices=ordenacionlist)
    distancia = StringField('Distancia')
    submit = SubmitField('Buscar')
class Review(FlaskForm):
    """Review form: a 1-5 star rating plus a mandatory comment."""
    stars = IntegerField('Puntuación', [
        validators.DataRequired(message='Es necesario introducir una puntuación entre 1 y 5'),
        validators.NumberRange(min=1, max=5, message='La puntuación debe ser de 1 a 5 estrellas')])
    comment = TextAreaField('Comentario', [
        validators.DataRequired(message='Es necesario escribir un comentario')])
    submit = SubmitField('Publicar Valoración')
class bidPlacementForm(FlaskForm):
    """Bid placement form: a free-text amount and a submit button.

    NOTE(review): the amount is an unvalidated StringField — numeric
    validation presumably happens elsewhere; confirm in the handling view.
    """
    amount = StringField('Cantidad')
    submit = SubmitField('Realizar Puja')
class reportForm(Form):
    """Report form: a report category plus a mandatory description."""
    category = SelectField('Categoría',
        choices = [
            ('Sospecha de fraude', 'Sospecha de fraude'),
            ('No acudió a la cita', 'No acudió a la cita'),
            ('Mal comportamiento', 'Mal comportamiento'),
            ('Artículo defectuoso', 'Artículo defectuoso'),
            ('Otros', 'Otros')])
    description = TextAreaField('Descripción del informe', [
        validators.DataRequired(message='Es necesario escribir una descripción')])
    submit = SubmitField('Publicar Informe')
| 48.025381
| 323
| 0.649931
| 957
| 9,461
| 6.426332
| 0.258098
| 0.075122
| 0.099024
| 0.095772
| 0.56374
| 0.539024
| 0.507317
| 0.47122
| 0.396423
| 0.353008
| 0
| 0.014233
| 0.220273
| 9,461
| 196
| 324
| 48.270408
| 0.819032
| 0.036043
| 0
| 0.376471
| 0
| 0
| 0.374383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.064706
| 0.023529
| 0
| 0.458824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
bcaae8938e310a72ba14496462496246c713e82d
| 577
|
py
|
Python
|
contrib/micronet/scripts/file2buf.py
|
pmalhaire/WireHub
|
588a372e678b49557deed6ba88a896596222fb2d
|
[
"Apache-2.0"
] | 337
|
2018-12-21T22:13:57.000Z
|
2019-11-01T18:35:10.000Z
|
contrib/micronet/scripts/file2buf.py
|
nask0/WireHub
|
588a372e678b49557deed6ba88a896596222fb2d
|
[
"Apache-2.0"
] | 8
|
2018-12-24T20:16:40.000Z
|
2019-09-02T11:54:48.000Z
|
contrib/micronet/scripts/file2buf.py
|
nask0/WireHub
|
588a372e678b49557deed6ba88a896596222fb2d
|
[
"Apache-2.0"
] | 18
|
2018-12-24T02:49:38.000Z
|
2019-07-31T20:00:47.000Z
|
#!/usr/bin/env python3
"""Dump a binary file as a C source snippet declaring a char array.

Usage: file2buf.py FILE NAME

Writes to stdout:

    char NAME[] = {
        0x.., 0x.., ...
    };
    unsigned int NAME_sz = <byte count>;
"""
import os
import sys

# Number of byte literals emitted per output line.
MAX = 8


def file_to_c_buf(fpath, name, out=None, per_line=MAX):
    """Write a C char-array initializer for the bytes of *fpath* to *out*.

    fpath    -- path of the binary file to dump
    name     -- C identifier used for the array and the _sz variable
    out      -- writable text stream (defaults to sys.stdout)
    per_line -- byte literals per output line

    Returns the number of bytes read. Unlike the original inline script this
    never emits a dangling ", " before the closing brace, and importing the
    module no longer runs the conversion (see the __main__ guard).
    """
    if out is None:
        out = sys.stdout
    count = 0
    with open(fpath, "rb") as fh:
        out.write("char %s[] = {" % (name,))
        while True:
            byte = fh.read(1)
            if not byte:  # EOF
                out.write("\n")
                break
            if count > 0:
                out.write(", ")
            if count % per_line == 0:
                out.write("\n\t")
            out.write("0x%.2x" % (ord(byte),))
            count += 1
    out.write("};\n")
    out.write("\n")
    out.write("unsigned int %s_sz = %s;\n" % (name, count))
    out.write("\n")
    return count


if __name__ == "__main__":
    file_to_c_buf(sys.argv[1], sys.argv[2])
| 15.594595
| 49
| 0.443674
| 82
| 577
| 3.109756
| 0.5
| 0.176471
| 0.27451
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029891
| 0.362218
| 577
| 36
| 50
| 16.027778
| 0.663043
| 0.036395
| 0
| 0.086957
| 0
| 0
| 0.099278
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.086957
| 0
| 0.086957
| 0.173913
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcae93da2c9dcb0c8765b93504dcb020462aad8e
| 1,696
|
py
|
Python
|
game/ball.py
|
geoncic/PyBlock
|
69c8220e38a21b7e1c6dd2196752173f9e78981f
|
[
"MIT"
] | null | null | null |
game/ball.py
|
geoncic/PyBlock
|
69c8220e38a21b7e1c6dd2196752173f9e78981f
|
[
"MIT"
] | null | null | null |
game/ball.py
|
geoncic/PyBlock
|
69c8220e38a21b7e1c6dd2196752173f9e78981f
|
[
"MIT"
] | null | null | null |
import pygame
import pygame.gfxdraw
from constants import Constants
class Balls(object):
    """Factory/manager that spawns Ball sprites into the shared groups."""

    def __init__(self, all_sprites, all_balls):
        # Keep references to the shared sprite groups so every spawned ball
        # can be registered with both of them.
        self.all_sprites = all_sprites
        self.all_balls = all_balls

    def spawn_ball(self, pos, vel, team):
        """Create one Ball and register it with both sprite groups."""
        # Todo: Figure out how to spawn multiple balls with some sort of delay
        new_ball = Ball(pos, vel, team)
        self.all_balls.add(new_ball)
        self.all_sprites.add(new_ball)

    def ball_test(self):
        """Debug helper: print a marker line followed by this instance."""
        print("This is a Ball Test!")
        print(self)

    def update(self):
        """Debug helper: dump the instance attributes and its concrete type."""
        print(self.__dict__)
        print(type(self))
class Ball(pygame.sprite.Sprite):
    """A moving ball sprite that bounces off the board edges."""

    def __init__(self, pos, vel, team):
        """Create a ball at *pos* with velocity *vel* in *team*'s colour.

        pos / vel are (x, y) pairs indexed with [0]/[1]; team is a key into
        Constants.BALL_TEAMS.
        """
        super().__init__()
        self.color = team
        # Colour looked up from the team table; used as the draw colour below.
        self.file = Constants.BALL_TEAMS[self.color]
        self.rad = int(Constants.BALL_SIZE/2)
        # Transparent square surface onto which the filled circle is drawn.
        self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA)
        pygame.draw.circle(self.image, self.file, (self.rad, self.rad), self.rad)
        self.x_pos = pos[0]
        self.y_pos = pos[1]
        self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos))
        # Per-update velocity components.
        self.dx = vel[0]
        self.dy = vel[1]

    def update(self):
        """Advance one frame: bounce off boundaries, move, and sync the rect."""
        self.check_boundary()
        self.x_pos += self.dx
        self.y_pos += self.dy
        self.rect.center = [self.x_pos, self.y_pos]
        # self.rect.center = pygame.mouse.get_pos() # has sprite follow the mouse

    def check_boundary(self):
        """Invert a velocity component whenever the ball leaves the play area."""
        # Horizontal bounce: x must stay within the board span next to the player area.
        if not Constants.PLAYER_WIDTH <= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH):
            self.dx = -1*self.dx
        # Vertical bounce: y must stay on screen.
        if not 0 <= self.y_pos <= Constants.SCREEN_HEIGHT:
            self.dy = -1*self.dy
| 30.836364
| 102
| 0.625
| 245
| 1,696
| 4.130612
| 0.302041
| 0.041502
| 0.039526
| 0.035573
| 0.083992
| 0.059289
| 0.059289
| 0.059289
| 0.059289
| 0
| 0
| 0.006339
| 0.255896
| 1,696
| 54
| 103
| 31.407407
| 0.795563
| 0.081958
| 0
| 0.05
| 0
| 0
| 0.012878
| 0
| 0
| 0
| 0
| 0.018519
| 0
| 1
| 0.175
| false
| 0
| 0.075
| 0
| 0.3
| 0.1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcaebabdfa8553517a45b393cf40eff654bc096f
| 36,597
|
py
|
Python
|
program/eggUI.py
|
otills/embryocv
|
d501f057bada15ff5dc753d3dae5a883b5c9e244
|
[
"MIT"
] | 1
|
2020-08-05T02:47:12.000Z
|
2020-08-05T02:47:12.000Z
|
program/eggUI.py
|
otills/embryocv
|
d501f057bada15ff5dc753d3dae5a883b5c9e244
|
[
"MIT"
] | null | null | null |
program/eggUI.py
|
otills/embryocv
|
d501f057bada15ff5dc753d3dae5a883b5c9e244
|
[
"MIT"
] | 1
|
2020-08-05T02:47:16.000Z
|
2020-08-05T02:47:16.000Z
|
from pyqtgraph.Qt import QtCore, QtGui
import numpy as np
from scipy.spatial import distance as dist
import glob
import re
import os
from PyQt5 import QtGui
from PyQt5.QtCore import *
from PyQt5.QtGui import *
import sys
import cv2
import pandas as pd
from PyQt5.Qt import *
import pyqtgraph as pg
#from PyQt4.Qt import *
#%%
class eggUI(QDialog):
'''
createOpenCVEggROI : take eggID defined ROIs and visualise
'''
sliderUpdate = QtCore.pyqtSignal()
embryoUpdate = QtCore.pyqtSignal()
keyPressed = QtCore.pyqtSignal()
    def __init__(self, parent=None):
        """Build the bare egg-identification dialog: window, image view, save button."""
        super(eggUI, self).__init__(parent)
        # Make QDialog
        self.diag = QtGui.QDialog()
        # NOTE(review): declared but never assigned in this method — these
        # globals are presumably set elsewhere; confirm before removing.
        global parentPath, vidTime
        self.diag.setWindowTitle('Identify eggs')
        self.diag.imv = pg.ImageView()
        self.btn_save = QPushButton('Save', self)
#==============================================================================
#
#==============================================================================
    def showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt):
        """Build and display the full egg-identification dialog.

        Lays out the image view, the embryo/approval table and the action
        buttons, loads the image sequence, and wires the button and timeline
        callbacks.

        NOTE(review): `ims` is unused here — images are loaded through
        self.imImport() from self.eggUIimPaths instead; confirm before removing.
        """
        self.eggInt = eggInt
        self.embryoLabels = embryoLabels
        self.diag.setWindowTitle('Identify eggs')
        # Make ImageView
        self.diag.imv = pg.ImageView()
        self.diag.resize(1000,600)
        # Make ROI
        self.importOpenCVROIs(eggRotBBox, eggBoxPoints)
        # Only create and attach an ROI if the first frame actually has one.
        if (eggRotBBox[0][0][0] != 'nan'):
            self.createOpenCVEggROI()
            self.diag.imv.addItem(self.roi)
        # Remove buttons from ImageView widget
        self.diag.imv.ui.roiBtn.hide()
        self.diag.imv.ui.menuBtn.hide()
        # Make tableview
        self.diag.table = QtGui.QTableWidget()
        self.diag.table.setShowGrid(True)
        self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted'])
        # Sets different alignment data just on the first column
        self.diag.table.setRowCount(int(len(self.embryoLabels)))
        self.diag.table.setColumnCount(2)
        # Highlight first row
        self.diag.table.selectRow(0)
        # Make layout
        checkLayout = QGridLayout()
        # Deal with stretching for appropriate formatting.
        checkLayout.setColumnStretch(0, 3)
        checkLayout.setColumnStretch(1, 1)
        checkLayout.setRowStretch(0, 1)
        checkLayout.setRowStretch(1, 3)
        # Add to layout
        checkLayout.addWidget(self.diag.imv,0,0,2,2)
        checkLayout.addWidget(self.diag.table,1,5)
        # Apply layout
        self.diag.setLayout(checkLayout)
        # Make buttons
        self.cpROI_btn = QtGui.QPushButton('&Copy ROI')
        self.cpROI_btn.setMinimumHeight(40);
        self.useCpROI_btn = QtGui.QPushButton('&Use Copied ROI')
        self.useCpROI_btn.setMinimumHeight(40);
        self.noEgg_btn = QtGui.QPushButton('&No Egg')
        self.noEgg_btn.setMinimumHeight(40);
        self.approveROI_btn = QtGui.QPushButton('&Approve ROIs')
        self.approveROI_btn.setMinimumHeight(40);
        self.exit_btn = QtGui.QPushButton('Exit')
        self.exit_btn.setMinimumHeight(40);
        # Make button layout
        self.btnLayout = QGridLayout()
        self.btnLayout.addWidget(self.cpROI_btn,0,0)
        self.btnLayout.addWidget(self.useCpROI_btn,0,1)
        self.btnLayout.addWidget(self.noEgg_btn,1,1)
        self.btnLayout.addWidget(self.approveROI_btn,1,0)
        # Exit button not implemented, just use window x (topRight).
        # self.btnLayout.addWidget(self.exit_btn,2,1)
        # Add button layout to GridLayout.
        checkLayout.addLayout(self.btnLayout,0,5)
        # Format images for pyqtgraph and put in ImageView
        # self.formatSequence(ims)
        self.imImport()
        self.diag.imv.setImage(self.compSeq)
        # Add the ROI to ImageItem
        self.diag.show()
        # Call function to add data
        self.dataForTable()
        # Function for modifying the table when ROI is approved.
        self.approveROI_btn.clicked.connect(self.updateTable)
        # Copy current ROI
        self.cpROI_btn.clicked.connect(self.cpROI)
        # Apply copied ROI
        self.useCpROI_btn.clicked.connect(self.applyCopiedROI)
        # Assign nan to frames not containing egg
        self.noEgg_btn.clicked.connect(self.recordNoEgg)
        # Exit - prompt user to confirm
        #self.exit_btn.clicked.connect(self.closeEvent)
        # Connect changes in timeline so correct ROI is created and displayed.
        self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo)
        #self.diag.keyPressEvent(self.keyPressEvent)
#==============================================================================
# Generate data for populating the embryo/approveROI table.
#==============================================================================
    def dataForTable(self):
        """Populate the embryo/approval table: one row per embryo, all rows
        initially 'No' (not approved) on a dark-blue background."""
        self.tableData = {'Embryo':list(self.embryoLabels),
                          'ROI approved':['No'] * len(list(self.embryoLabels))}
        # One background colour per row; RGBA(0,0,100,120) = translucent dark blue.
        self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels))
        # Enter data onto Table; columns follow sorted key order.
        horHeaders = []
        for n, key in enumerate(sorted(self.tableData.keys())):
            horHeaders.append(key)
            for m, item in enumerate(self.tableData[key]):
                newitem = QtGui.QTableWidgetItem(item)
                newitem.setBackground(QtGui.QColor(0,0,100,120))
                self.diag.table.setItem(m, n, newitem)
        # Add Header
        self.diag.table.setHorizontalHeaderLabels(horHeaders)
        # Adjust size of Table
        self.diag.table.resizeRowsToContents()
        # self.diag.table.resizeColumnsToContents()
#==============================================================================
# Update table when approve ROI button clicked.
#==============================================================================
    def updateTable(self):
        """Mark the currently selected embryo's ROI as approved and rebuild the table.

        Updates the backing data for the selected row, switches its colour to
        green, then re-creates every cell so backgrounds reflect self.tableCols.
        """
        self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved'
        # RGBA(0,100,0,120) = translucent green marks an approved row.
        self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120)
        horHeaders = []
        for n, key in enumerate(sorted(self.tableData.keys())):
            horHeaders.append(key)
            for m, item in enumerate(self.tableData[key]):
                newitem = QtGui.QTableWidgetItem(item)
                self.diag.table.setItem(m, n, newitem)
                newitem.setBackground(self.tableCols[m])
        #Add Header
        self.diag.table.setHorizontalHeaderLabels(horHeaders)
        #Adjust size of Table
        self.diag.table.resizeRowsToContents()
#==============================================================================
# Update the user interface
#==============================================================================
    def updateUI(self,ims,eggRotBBox, eggBoxPoints):
        """Refresh the dialog for new data: reload images, re-import ROI arrays
        and rebuild the displayed ROI for the new embryo.

        NOTE(review): `ims` is unused — images come from self.imImport();
        confirm with callers before removing the parameter.
        """
        self.imImport()
        self.diag.imv.setImage(self.compSeq)
        self.importOpenCVROIs(eggRotBBox, eggBoxPoints)
        self.getSeqValsAndCurrROI()
        self.updateOpenCVEggROINewEmbryo()
        # Add the ROI to ImageItem
        #self.diag.imv.addItem(self.roi)
#==============================================================================
# Deal with data from the dataHandling class
#==============================================================================
def formatSequence(self,ims):
# Format seq appropriately for pyqtgraph ROIs
self.tSeqd = np.zeros_like(ims)
for l in range(len(self.tSeqd)):
self.tSeqd[l] = ims[l].T
#==============================================================================
# Get folders for a particular embryo
#==============================================================================
def getEmbryoFolders(self, parentPath, embryo):
self.parentPath = parentPath
self.embryo = embryo
self.embryoFolders = glob.glob(parentPath + "*/" + embryo +"/")
self.embryoFolders.sort(key=os.path.getctime)
#==============================================================================
# Get image
#==============================================================================
    def imImport(self):
        """Load every image in self.eggUIimPaths, rescale its intensity range
        to 8 bits, and store the transposed result into self.compSeq.

        NOTE(review): assumes self.eggUIimPaths and a pre-allocated
        self.compSeq already exist on the instance — they are set elsewhere.
        A constant image (max == min) would make `ran` zero and divide by
        zero here.
        """
        for f in range(len(self.eggUIimPaths)):
            im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH)
            # Scale factor mapping the image's dynamic range onto 0-255.
            ran = (im.max()-im.min())/255.
            out = (im/ran)
            out = out-out.min()
            self.compSeq[int(f)] = out.astype(np.uint8)
            # Transposed — presumably to match pyqtgraph's axis order; confirm.
            self.compSeq[f] = self.compSeq[f].T
#==============================================================================
# Update image iteratively when slider moved
#==============================================================================
#==============================================================================
# def updateImage(self):
# self.getSeqValsAndCurrROI()
# #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq
# #self.UI.comp(self.imImport(self.diag.imv.currentIndex()))
# im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH)
# ran = (im.max()-im.min())/255.
# out = (im/ran)
# out = out-out.min()
# self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8)
# self.diag.imv.setImage(self.compSeq.T)
# self.diag.imv.show()
# #========
#==============================================================================
#==============================================================================
# ROI functions
#==============================================================================
#==============================================================================
# Import OpenCV determined ROIs from dataHandling instance. Called from showUI and updateUI.
#==============================================================================
def importOpenCVROIs(self,eggRotBBox, eggBoxPoints):
self.eggRotBBox = eggRotBBox
self.eggBoxPoints = eggBoxPoints
self.originalEggRotBBox = eggRotBBox.copy()
self.originalEggBoxPoints = eggBoxPoints.copy()
#==============================================================================
# Get index values for ROI data.
#==============================================================================
    def getSeqValsAndCurrROI(self):
        """Translate the ImageView's current frame index into indices into the
        ROI arrays and cache the current frame's rotated bbox and corner points.

        Sets self.divVal / self.intDivVal (and self.withinSeqVal in the
        interval case) plus self.currROI_eggRotBBox / self.currROI_eggBoxPoints.

        NOTE(review): 1234 appears to be a sentinel for "no interval / flat
        sequence" — confirm against the caller that sets eggInt.
        """
        # Calculate the indices for current frame
        if self.eggInt != 1234:
            # Interval mode: frame index maps to (sequence, within-sequence).
            self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1]))
            self.intDivVal = int(self.divVal)
            self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal]))
            self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal]
            self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal]
        else:
            # Flat mode: frame index addresses the single sequence directly.
            self.divVal = self.diag.imv.currentIndex
            self.intDivVal = int(self.divVal)
            self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal]
            self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal]
#==============================================================================
# Generate a pyqtgraph ROI, using data from OpenCV.
#==============================================================================
def createOpenCVEggROI(self):
# Get relevant sequence position and ROI.
self.getSeqValsAndCurrROI()
if (self.currROI_eggRotBBox[0] != 'nan'):
# 0 or 90 degree angles seem very buggy. Shift to 1 and 89 as a bodge fix.
if self.currROI_eggRotBBox[4] == -90:
#self.currROI_eggRotBBox[4] = -89
# Get rotated bounding box points
ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]
# Get bottom most, and top most sorted corner points
bottomMost = ySorted[:2, :]
topMost = ySorted[2:, :]
# Get bottom most
bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]
(bl, br) = bottomMost
# Use bottom-left coordinate as anchor to calculate the Euclidean distance between the
# The point with the largest distance will be our bottom-right point
D = dist.cdist(bl[np.newaxis], topMost, "euclidean")[0]
(tl, tr) = topMost[np.argsort(D)[::-1], :]
self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
elif self.currROI_eggRotBBox[4] == -0:
#self.currROI_eggRotBBox[4] = -1
ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]
# Get bottom most, and top most sorted corner points
bottomMost = ySorted[:2, :]
topMost = ySorted[2:, :]
# Get bottom most
bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]
(bl, br) = bottomMost
# Use bottom-left coordinate as anchor to calculate the Euclidean distance between the
# The point with the largest distance will be our bottom-right point
D = dist.cdist(bl[np.newaxis], topMost, "euclidean")[0]
(tl, tr) = topMost[np.argsort(D)[::-1], :]
self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
elif self.currROI_eggRotBBox[4] == -180:
#self.currROI_eggRotBBox[4] = -179
ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]
# Get bottom most, and top most sorted corner points
bottomMost = ySorted[:2, :]
topMost = ySorted[2:, :]
# Get bottom most
bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]
(bl, br) = bottomMost
# Use bottom-left coordinate as anchor to calculate the Euclidean distance between the
# The point with the largest distance will be our bottom-right point
D = dist.cdist(bl[np.newaxis], topMost, "euclidean")[0]
(tl, tr) = topMost[np.argsort(D)[::-1], :]
self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
else:
# Get rotated bounding box points
ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]
# Get bottom most, and top most sorted corner points
bottomMost = ySorted[:2, :]
topMost = ySorted[2:, :]
# Get bottom most
bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]
(bl, br) = bottomMost
# Use bottom-left coordinate as anchor to calculate the Euclidean distance between the
# The point with the largest distance will be our bottom-right point
D = dist.cdist(bl[np.newaxis], topMost, "euclidean")[0]
(tl, tr) = topMost[np.argsort(D)[::-1], :]
# Make ROI - note non 0,or 90 degree angles, require different of the X size
# Rectangular ROI used to enable more easy handling of corner handles for tracking user chagnges.
if (self.currROI_eggRotBBox[4] == -90.0) | (self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4] == 0.0):
self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
# roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])
# Debug
# print 'no angle'
else:
# Random angle ROIs
self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
self.roi.setAngle(self.currROI_eggRotBBox[4], update=True)
# roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])
# Add handles
self.roi.addRotateHandle([1, 0],[0.5,0.5])
self.roi.addRotateHandle([0, 1], [0.5,0.5])
self.roi.addScaleHandle([1, 1], [0, 0])
self.roi.addScaleHandle([0, 0], [1, 1])
self.roi.setPen('y',width=3)
self.roi.removable
self.roi.invertible = 'True'
# Make var for dealing with modifications to roi
self.updatedEggROI=[]
self.roi.sigRegionChangeFinished.connect(self.updateROI)
#else:
#==============================================================================
# Update the ROI for current embryo.
#==============================================================================
def updateOpenCVEggROICurrEmbryo(self):
    """Rebuild the egg ROI overlay for the current embryo/frame.

    Removes any previously displayed ROI from the image view, reads the
    current frame's OpenCV rotated bounding box (self.currROI_eggRotBBox:
    [x, y, w, h, angle]) and corner points, then creates a pyqtgraph ROI
    with rotate/scale handles and reconnects the change handler.
    """
    # Remove the previously displayed ROI, if any.
    if hasattr(self, 'roi'):
        self.diag.imv.removeItem(self.roi)
    # Get relevant video position and ROI for the current frame.
    self.getSeqValsAndCurrROI()
    # Sort the rotated-bbox corner points by y and take the bottom-left
    # corner as the ROI anchor.
    ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]
    bottomMost = ySorted[:2, :]
    bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]
    bl = bottomMost[0]
    angle = self.currROI_eggRotBBox[4]
    # The original code special-cased -90/-0/-180 in separate duplicated
    # branches that all built the same axis-aligned rectangle; the
    # duplication is collapsed here. (-0 == 0 for Python floats, so plain
    # 0.0 angles take the axis-aligned path as before.)
    if angle == -90 or angle == 0 or angle == -180:
        self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
    else:
        # Arbitrary angles: negate the X extent and rotate the ROI to
        # match the OpenCV rotated-rect convention.
        self.roi = pg.ROI([bl[0], bl[1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
        self.roi.setAngle(angle, update=True)
    # Rectangular ROI with handles on every corner, to ease tracking of
    # user changes.
    self.roi.addRotateHandle([1, 0], [0.5, 0.5])
    self.roi.addRotateHandle([0, 1], [0.5, 0.5])
    self.roi.addScaleHandle([1, 1], [0, 0])
    self.roi.addScaleHandle([0, 0], [1, 1])
    self.roi.setPen('y', width=3)
    self.roi.removable  # NOTE(review): no-op attribute access; probably meant `self.roi.removable = True`
    self.roi.invertible = 'True'  # NOTE(review): truthy string; probably meant the bool True
    # Var for tracking user modifications to the ROI (filled by updateROI).
    self.updatedEggROI = []
    self.diag.imv.addItem(self.roi)
    self.roi.sigRegionChangeFinished.connect(self.updateROI)
#==============================================================================
# Update ROI for new embryo.
#==============================================================================
def updateOpenCVEggROINewEmbryo(self):
    """Rebuild the egg ROI overlay after switching to a new embryo.

    Same behavior as updateOpenCVEggROICurrEmbryo: removes any previous
    ROI, reads the current frame's OpenCV rotated bounding box
    (self.currROI_eggRotBBox: [x, y, w, h, angle]) and corner points,
    then creates a pyqtgraph ROI with handles and reconnects the handler.
    """
    # Remove old ROI, if any.
    if hasattr(self, 'roi'):
        self.diag.imv.removeItem(self.roi)
    # Get relevant video position and ROI.
    self.getSeqValsAndCurrROI()
    # Sort the rotated-bbox corner points by y and take the bottom-left
    # corner as the ROI anchor.
    ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]
    bottomMost = ySorted[:2, :]
    bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]
    bl = bottomMost[0]
    angle = self.currROI_eggRotBBox[4]
    # The original code special-cased -90/-0/-180 in separate duplicated
    # branches that all built the same axis-aligned rectangle; collapsed
    # here. (-0 == 0 for Python floats, so 0.0 angles behave as before.)
    if angle == -90 or angle == 0 or angle == -180:
        self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
    else:
        # Arbitrary angles: negate the X extent and apply the OpenCV angle.
        self.roi = pg.ROI([bl[0], bl[1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])
        self.roi.setAngle(angle, update=True)
    # Handles on every corner for tracking user changes.
    self.roi.addRotateHandle([1, 0], [0.5, 0.5])
    self.roi.addRotateHandle([0, 1], [0.5, 0.5])
    self.roi.addScaleHandle([1, 1], [0, 0])
    self.roi.addScaleHandle([0, 0], [1, 1])
    self.roi.setPen('y', width=3)
    self.roi.removable  # NOTE(review): no-op attribute access; probably meant `self.roi.removable = True`
    self.roi.invertible = 'True'  # NOTE(review): truthy string; probably meant the bool True
    # Var for tracking user modifications to the ROI (filled by updateROI).
    self.updatedEggROI = []
    self.diag.imv.addItem(self.roi)
    self.roi.sigRegionChangeFinished.connect(self.updateROI)
#==============================================================================
# Update ROI.
#==============================================================================
def updateROI(self):
    """Record user edits to the displayed ROI back into the egg-ROI arrays.

    Connected to self.roi.sigRegionChangeFinished. Reads the ROI's global
    transform, recomputes a rotated bounding box from the corner-handle
    positions via cv2.minAreaRect, and stores it into self.eggRotBBox /
    self.eggBoxPoints at the current frame's indices.
    """
    #global vidTime, xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI, changeX, changeY, changeScaleX, changeScaleY, changeAngle
    # Get changes to ROI scale, angle and position
    roiChanges = self.roi.getGlobalTransform()
    changeX = -roiChanges.getTranslation()[0]
    changeY = roiChanges.getTranslation()[1]
    changeScaleX = roiChanges.getScale()[0]
    changeScaleY = roiChanges.getScale()[1]
    changeAngle = roiChanges.getAngle()
    # Update ROI, either updating the previously updated or taking the unaltered ROI from OpenCV as a starting point.
    # Shape: ((x, y), (major extent, minor extent), angle) — max/min keep
    # the larger value first regardless of how scaling swapped the sides.
    #if len(self.updatedEggROI) == 0:
    self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle)
    #else:
    #self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle)
    # Map the ROI handle positions into image-item coordinates.
    hh = self.roi.getHandles()
    hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h in hh]
    # Handle on each corner. Get handle positions
    self.xyPosHandles =[]
    for h in hh:
        self.xyPosHandles.append([h.x(),h.y()])
    # Fit a rotated rectangle to the (integer) handle corners.
    (eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) )
    # Exact 0/90/180 degree angles are buggy elsewhere; nudge by 1 degree
    # (same bodge as in the updateOpenCVEggROI* methods).
    if eggBBAng == -90:
        eggBBAng = -89
    elif eggBBAng == -180:
        eggBBAng = -179
    elif eggBBAng == -0:
        eggBBAng = -1
    # Save updated
    # If more than one frame eggID per sequence..
    if self.eggInt != 1234:
        self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng]
        self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng))
    # Otherwise just save simply
    else:
        self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng]
        self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng))
#==============================================================================
# Copy ROI on button click.
#==============================================================================
def cpROI(self):
    """Snapshot the current frame's ROI so applyCopiedROI can paste it later.

    Stores references to the current rotated bounding box and its corner
    points on the instance.
    """
    self.originalEggRotBBox, self.originalEggBoxPoints = (
        self.currROI_eggRotBBox,
        self.currROI_eggBoxPoints,
    )
#==============================================================================
# Assign nan to current ROI if 'No Egg' button clicked
#==============================================================================
def recordNoEgg(self):
    """Mark the current frame as containing no egg ('No Egg' button).

    Removes the on-screen ROI and stores NaNs in place of the rotated
    bounding box and corner points so downstream code can skip this frame.
    """
    # Remove ROI from the image view.
    self.diag.imv.removeItem(self.roi)
    # Store nans in place of ROI.
    if self.eggInt != 1234:
        # Multi-frame sequences: index by (sequence, frame-within-sequence).
        # BUGFIX: eggBoxPoints previously wrote to [0, self.intDivVal] here,
        # inconsistent with eggRotBBox on the adjacent line and with the
        # indexing used by updateROI for the same case.
        self.eggRotBBox[self.intDivVal, self.withinSeqVal] = [np.nan, np.nan, np.nan, np.nan, np.nan]
        self.eggBoxPoints[self.intDivVal, self.withinSeqVal] = [np.nan, np.nan, np.nan, np.nan]
    else:
        # Single eggID per sequence: index directly by frame.
        self.eggBoxPoints[0, self.intDivVal] = [np.nan, np.nan, np.nan, np.nan]
        self.eggRotBBox[0, self.intDivVal] = [np.nan, np.nan, np.nan, np.nan, np.nan]
#==============================================================================
# Copy ROI on button click.
#==============================================================================
def applyCopiedROI(self):
    """Paste the ROI saved by cpROI onto the current frame and redraw.

    Recomputes the frame indices from the image view's currentIndex,
    writes the copied bbox/corner points into the egg-ROI arrays, then
    rebuilds the on-screen ROI for the current embryo.
    """
    self.getSeqValsAndCurrROI()
    # Store copied ROI to embryo sequence ROIs
    if self.eggInt != 1234:
        # Multi-frame sequences: split currentIndex into a (sequence,
        # frame-within-sequence) pair via the fractional division below.
        self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1]))
        self.intDivVal = int(self.divVal)
        self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal]))
        self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox
        self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints
    else:
        # Single eggID per sequence: currentIndex is the frame index.
        self.divVal = self.diag.imv.currentIndex
        self.intDivVal = int(self.divVal)
        self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox
        self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints
    # Redraw the ROI from the newly stored values.
    self.updateOpenCVEggROICurrEmbryo()
#==============================================================================
#
#==============================================================================
#==============================================================================
# Close button - not implemented (hidden)
#==============================================================================
#==============================================================================
# def closeEvent(self, event):
#
# quit_msg = "Are you sure you want to exit the program?"
# reply = QtGui.QMessageBox.question(self, 'Message',
# quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
#
# if reply == QtGui.QMessageBox.Yes:
# #event.accept()
# app.quit()
# else:
# event.ignore()
#
#==============================================================================
#==============================================================================
# #self.originalEggRotBBox = eggRotBBox.copy()
# #self.originalEggBoxPoints = eggBoxPoints.copy()
# #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal]
# #self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal]
#
# # Modified version of updateOpenCVEggROICurrEmbryo
# # Remove previous
# self.diag.imv.removeItem(self.roi)
# # Get relevant video position and ROI.
# self.getSeqValsAndCurrROI()
# # Get rotated bounding box points
# ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :]
# # Get bottom most, and top most sorted corner points
# bottomMost = ySorted[:2, :]
# topMost = ySorted[2:, :]
# # Get bottom most
# bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]
# (bl, br) = bottomMost
# # Use bottom-left coordinate as anchor to calculate the Euclidean distance between the
# # The point with the largest distance will be our bottom-right point
# D = dist.cdist(bl[np.newaxis], topMost, "euclidean")[0]
# (tl, tr) = topMost[np.argsort(D)[::-1], :]
# # Make ROI - note non 0,or 90 degree angles, require different of the X size
# # Rectangular ROI used to enable more easy handling of corner handles for tracking user chagnges.
# if (self.originalEggRotBBox[4] == -90.0) | (self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4] == 0.0):
# self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]])
# # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])
# else:
# # Random angle ROIs
# self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]])
# self.roi.setAngle(self.originalEggRotBBox[4], update=True)
# # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])
# # Add handles
# self.roi.addRotateHandle([1, 0],[0.5,0.5])
# self.roi.addRotateHandle([0, 1], [0.5,0.5])
# self.roi.addScaleHandle([1, 1], [0, 0])
# self.roi.addScaleHandle([0, 0], [1, 1])
# self.roi.setPen('y',width=3)
# self.roi.removable
# self.roi.invertible = 'True'
# # Make var for dealing with modifications to roi
# self.updatedEggROI=[]
# ### Still to do...
# self.diag.imv.addItem(self.roi)
# self.roi.sigRegionChangeFinished.connect(self.updateROI)
#==============================================================================
#===============
| 54.540984
| 329
| 0.545099
| 3,856
| 36,597
| 5.138745
| 0.111774
| 0.055514
| 0.076306
| 0.034418
| 0.686652
| 0.660207
| 0.640424
| 0.617159
| 0.605249
| 0.605047
| 0
| 0.020821
| 0.244064
| 36,597
| 671
| 330
| 54.540984
| 0.695427
| 0.409924
| 0
| 0.585227
| 0
| 0
| 0.012608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048295
| false
| 0
| 0.056818
| 0
| 0.116477
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcaf69ed2a6fded7e4b539b423940b33563b6d40
| 540
|
py
|
Python
|
tests/unittest/options/pricing/test_binomial_trees.py
|
yiluzhu/quant
|
784c5cc20eeded2ff684b464eec4744f000d9638
|
[
"MIT"
] | 1
|
2020-10-14T12:56:14.000Z
|
2020-10-14T12:56:14.000Z
|
tests/unittest/options/pricing/test_binomial_trees.py
|
yiluzhu/quant
|
784c5cc20eeded2ff684b464eec4744f000d9638
|
[
"MIT"
] | null | null | null |
tests/unittest/options/pricing/test_binomial_trees.py
|
yiluzhu/quant
|
784c5cc20eeded2ff684b464eec4744f000d9638
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from options.pricing.binomial_trees import BinomialTreePricer
from options.option import OptionType, Option
class BinomialTreeTestCase(TestCase):
    """Unit tests for the binomial-tree option pricer."""

    def test_basic(self):
        """European put: spot 50, strike 52, rate 5%, expiry 2 years, vol 30%."""
        option = Option(OptionType.PUT, 50, 52, 0.05, 2, 0.3)
        pricer = BinomialTreePricer(steps=100)
        self.assertEqual(6.7781, pricer.price_option(option))
| 30
| 86
| 0.698148
| 67
| 540
| 5.58209
| 0.656716
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061466
| 0.216667
| 540
| 17
| 87
| 31.764706
| 0.822695
| 0.196296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.111111
| false
| 0
| 0.333333
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
bcb0236709da62fc588329e551c92b5fc621ffd9
| 2,927
|
py
|
Python
|
kafka/structs.py
|
informatique-cdc/kafka-python
|
d73bd6fc2f8825e2fddb7c4f091af7b266e37aea
|
[
"Apache-2.0"
] | 4,389
|
2015-06-12T06:00:10.000Z
|
2022-03-31T20:41:42.000Z
|
kafka/structs.py
|
informatique-cdc/kafka-python
|
d73bd6fc2f8825e2fddb7c4f091af7b266e37aea
|
[
"Apache-2.0"
] | 1,595
|
2015-12-02T20:58:22.000Z
|
2022-03-27T07:28:03.000Z
|
kafka/structs.py
|
informatique-cdc/kafka-python
|
d73bd6fc2f8825e2fddb7c4f091af7b266e37aea
|
[
"Apache-2.0"
] | 1,115
|
2015-12-02T23:17:52.000Z
|
2022-03-30T03:34:29.000Z
|
""" Other useful structs """
from __future__ import absolute_import
from collections import namedtuple
"""A topic and partition tuple
Keyword Arguments:
topic (str): A topic name
partition (int): A partition id
"""
TopicPartition = namedtuple("TopicPartition",
["topic", "partition"])
"""A Kafka broker metadata used by admin tools.
Keyword Arguments:
nodeID (int): The Kafka broker id.
host (str): The Kafka broker hostname.
port (int): The Kafka broker port.
rack (str): The rack of the broker, which is used to in rack aware
partition assignment for fault tolerance.
Examples: `RACK1`, `us-east-1d`. Default: None
"""
BrokerMetadata = namedtuple("BrokerMetadata",
["nodeId", "host", "port", "rack"])
"""A topic partition metadata describing the state in the MetadataResponse.
Keyword Arguments:
topic (str): The topic name of the partition this metadata relates to.
partition (int): The id of the partition this metadata relates to.
leader (int): The id of the broker that is the leader for the partition.
replicas (List[int]): The ids of all brokers that contain replicas of the
partition.
isr (List[int]): The ids of all brokers that contain in-sync replicas of
the partition.
error (KafkaError): A KafkaError object associated with the request for
this partition metadata.
"""
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
"""The Kafka offset commit API
The Kafka offset commit API allows users to provide additional metadata
(in the form of a string) when an offset is committed. This can be useful
(for example) to store information about which node made the commit,
what time the commit was made, etc.
Keyword Arguments:
offset (int): The offset to be committed
metadata (str): Non-null metadata
"""
OffsetAndMetadata = namedtuple("OffsetAndMetadata",
# TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata)
["offset", "metadata"])
"""An offset and timestamp tuple
Keyword Arguments:
offset (int): An offset
timestamp (int): The timestamp associated to the offset
"""
OffsetAndTimestamp = namedtuple("OffsetAndTimestamp",
["offset", "timestamp"])
MemberInformation = namedtuple("MemberInformation",
["member_id", "client_id", "client_host", "member_metadata", "member_assignment"])
GroupInformation = namedtuple("GroupInformation",
["error_code", "group", "state", "protocol_type", "protocol", "members", "authorized_operations"])
"""Define retry policy for async producer
Keyword Arguments:
Limit (int): Number of retries. limit >= 0, 0 means no retries
backoff_ms (int): Milliseconds to backoff.
retry_on_timeouts:
"""
RetryOptions = namedtuple("RetryOptions",
["limit", "backoff_ms", "retry_on_timeouts"])
| 33.261364
| 102
| 0.702767
| 358
| 2,927
| 5.692737
| 0.374302
| 0.023553
| 0.027478
| 0.023553
| 0.102552
| 0.069676
| 0.069676
| 0.035329
| 0.035329
| 0
| 0
| 0.001697
| 0.194739
| 2,927
| 87
| 103
| 33.643678
| 0.862961
| 0.031773
| 0
| 0
| 0
| 0
| 0.415135
| 0.022703
| 0
| 0
| 0
| 0.011494
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcb21686e2484863628d877e956c259a49e6e1be
| 2,542
|
py
|
Python
|
app/resources/magic_castle_api.py
|
ComputeCanada/mc-hub
|
92b4c212ba8f7b5b1c8b8700f981275605a07067
|
[
"BSD-3-Clause"
] | 5
|
2020-09-04T16:34:36.000Z
|
2020-09-25T19:14:59.000Z
|
app/resources/magic_castle_api.py
|
ComputeCanada/mc-hub
|
92b4c212ba8f7b5b1c8b8700f981275605a07067
|
[
"BSD-3-Clause"
] | 39
|
2020-09-12T17:37:14.000Z
|
2022-03-10T17:49:57.000Z
|
app/resources/magic_castle_api.py
|
ComputeCanada/mc-hub
|
92b4c212ba8f7b5b1c8b8700f981275605a07067
|
[
"BSD-3-Clause"
] | 1
|
2021-03-29T15:42:13.000Z
|
2021-03-29T15:42:13.000Z
|
from flask import request
from resources.api_view import ApiView
from exceptions.invalid_usage_exception import InvalidUsageException
from models.user.user import User
from models.user.authenticated_user import AuthenticatedUser
class MagicCastleAPI(ApiView):
    """REST endpoints for listing, creating, modifying and destroying
    Magic Castle clusters on behalf of a (possibly authenticated) user."""

    def _configure_from_request(self, magic_castle):
        """Read the request's JSON body, validate it is non-empty, and apply
        it as the cluster configuration.

        :raises InvalidUsageException: when the request carries no JSON data.
        """
        json_data = request.get_json()
        if not json_data:
            raise InvalidUsageException("No json data was provided")
        magic_castle.set_configuration(json_data)

    def get(self, user: User, hostname):
        """Return one cluster's configuration (when hostname is given) or a
        summary of every cluster visible to the user."""
        if hostname:
            magic_castle = user.get_magic_castle_by_hostname(hostname)
            return magic_castle.dump_configuration()
        # The two original branches differed only by the "owner" key;
        # collapsed into one loop. isinstance (rather than type ==) so
        # subclasses of AuthenticatedUser also see owner information.
        include_owner = isinstance(user, AuthenticatedUser)
        summaries = []
        for magic_castle in user.get_all_magic_castles():
            entry = {
                **magic_castle.dump_configuration(planned_only=True),
                "hostname": magic_castle.get_hostname(),
                "status": magic_castle.get_status().value,
                "freeipa_passwd": magic_castle.get_freeipa_passwd(),
            }
            if include_owner:
                entry["owner"] = magic_castle.get_owner_username()
            summaries.append(entry)
        return summaries

    def post(self, user: User, hostname, apply=False):
        """Apply an existing cluster's plan (apply=True) or plan creation of
        a new cluster from the request's JSON configuration."""
        if apply:
            magic_castle = user.get_magic_castle_by_hostname(hostname)
            magic_castle.apply()
            return {}
        magic_castle = user.create_empty_magic_castle()
        self._configure_from_request(magic_castle)
        magic_castle.plan_creation()
        return {}

    def put(self, user: User, hostname):
        """Plan a modification of an existing cluster from the JSON body."""
        magic_castle = user.get_magic_castle_by_hostname(hostname)
        self._configure_from_request(magic_castle)
        magic_castle.plan_modification()
        return {}

    def delete(self, user: User, hostname):
        """Plan destruction of an existing cluster."""
        magic_castle = user.get_magic_castle_by_hostname(hostname)
        magic_castle.plan_destruction()
        return {}
| 39.71875
| 77
| 0.5893
| 257
| 2,542
| 5.509728
| 0.249027
| 0.217514
| 0.093927
| 0.056497
| 0.658192
| 0.634181
| 0.634181
| 0.634181
| 0.634181
| 0.580508
| 0
| 0
| 0.335563
| 2,542
| 63
| 78
| 40.349206
| 0.838366
| 0
| 0
| 0.54386
| 0
| 0
| 0.043666
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070175
| false
| 0.035088
| 0.087719
| 0
| 0.298246
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcbef8c15ce4fa1656c062f45eb901b87f935220
| 1,828
|
py
|
Python
|
musicLrc.py
|
xiangxing98/Rhythm-Enlightment
|
d6302321e858d07480b18e94c59de87f91c39202
|
[
"MIT"
] | null | null | null |
musicLrc.py
|
xiangxing98/Rhythm-Enlightment
|
d6302321e858d07480b18e94c59de87f91c39202
|
[
"MIT"
] | null | null | null |
musicLrc.py
|
xiangxing98/Rhythm-Enlightment
|
d6302321e858d07480b18e94c59de87f91c39202
|
[
"MIT"
] | null | null | null |
import time

# Raw LRC-format lyrics for the song 传奇 ("Legend"), sung by Faye Wong.
# Each line holds one or more [mm:ss.xx] timestamps followed by the lyric
# text shown from that time onward.
musicLrc = """
[00:03.50]传奇
[00:19.10]作词:刘兵 作曲:李健
[00:20.60]演唱:王菲
[00:26.60]
[04:40.75][02:39.90][00:36.25]只是因为在人群中多看了你一眼
[04:49.00]
[02:47.44][00:43.69]再也没能忘掉你容颜
[02:54.83][00:51.24]梦想着偶然能有一天再相见
[03:02.32][00:58.75]从此我开始孤单思念
[03:08.15][01:04.30]
[03:09.35][01:05.50]想你时你在天边
[03:16.90][01:13.13]想你时你在眼前
[03:24.42][01:20.92]想你时你在脑海
[03:31.85][01:28.44]想你时你在心田
[03:38.67][01:35.05]
[04:09.96][03:39.87][01:36.25]宁愿相信我们前世有约
[04:16.37][03:46.38][01:42.47]今生的爱情故事 不会再改变
[04:24.82][03:54.83][01:51.18]宁愿用这一生等你发现
[04:31.38][04:01.40][01:57.43]我一直在你身旁 从未走远
[04:39.55][04:09.00][02:07.85]
"""
# Map from playback time in seconds (float) -> lyric text.
lrcDict = {}
musicLrcList = musicLrc.splitlines()
#print(musicLrcList)
for lrcLine in musicLrcList:
    # A line may carry several timestamps for the same lyric text, e.g.:
    #[04:40.75][02:39.90][00:36.25]只是因为在人群中多看了你一眼
    #[04:40.75 [02:39.90 [00:36.25 只是因为在人群中多看了你一眼
    #[00:20.60]演唱:王菲
    # Splitting on "]" yields the timestamp fragments plus the lyric text
    # as the final element.
    lrcLineList = lrcLine.split("]")
    for index in range(len(lrcLineList) - 1):
        # Drop the leading "[" from each timestamp fragment.
        timeStr = lrcLineList[index][1:]
        #print(timeStr)
        #00:03.50
        timeList = timeStr.split(":")
        # Convert "mm:ss.xx" to seconds.
        timelrc = float(timeList[0]) * 60 + float(timeList[1])
        #print(time)
        lrcDict[timelrc] = lrcLineList[-1]
print(lrcDict)
# Collect and sort every timestamp so the active lyric can be found by a
# linear scan below.
allTimeList = []
for t in lrcDict:
    allTimeList.append(t)
allTimeList.sort()
#print(allTimeList)
# Disabled interactive version: prompts for a time ("请输入一个时间" =
# "enter a time") and prints the lyric active at that time ("时间太小" =
# "time too small").
'''
while 1:
    getTime = float(input("请输入一个时间"))
    for n in range(len(allTimeList)):
        tempTime = allTimeList[n]
        if getTime < tempTime:
            break
    if n == 0:
        print("时间太小")
    else:
        print(lrcDict[allTimeList[n - 1]])
'''
# Playback simulation: advance one second per iteration, printing the
# lyric whose timestamp was most recently passed.
# NOTE(review): this loop never terminates and keeps re-printing the last
# lyric after the song ends.
getTime = 0
while 1:
    for n in range(len(allTimeList)):
        tempTime = allTimeList[n]
        if getTime < tempTime:
            break
    lrc = lrcDict.get(allTimeList[n - 1])
    if lrc == None:
        pass
    else:
        print(lrc)
    time.sleep(1)
    getTime += 1
| 22.292683
| 62
| 0.605033
| 297
| 1,828
| 3.723906
| 0.380471
| 0.014467
| 0.016275
| 0.0217
| 0.227848
| 0.209765
| 0.209765
| 0.209765
| 0.209765
| 0.209765
| 0
| 0.212225
| 0.203501
| 1,828
| 82
| 63
| 22.292683
| 0.54739
| 0.098468
| 0
| 0
| 0
| 0.04
| 0.413392
| 0.310044
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.02
| 0.02
| 0
| 0.02
| 0.04
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcc231c6648af0cd64b843faf63ad79a79b6853b
| 895
|
py
|
Python
|
src/plugins/sjsy.py
|
2443391447/nonebot2
|
c9fa0c44c130b8a1425b2b71105fa909232c37b0
|
[
"MIT"
] | 1
|
2021-08-24T03:18:23.000Z
|
2021-08-24T03:18:23.000Z
|
src/plugins/sjsy.py
|
2443391447/nonebot2
|
c9fa0c44c130b8a1425b2b71105fa909232c37b0
|
[
"MIT"
] | null | null | null |
src/plugins/sjsy.py
|
2443391447/nonebot2
|
c9fa0c44c130b8a1425b2b71105fa909232c37b0
|
[
"MIT"
] | 1
|
2021-09-01T07:50:03.000Z
|
2021-09-01T07:50:03.000Z
|
from nonebot import on_keyword, on_command
from nonebot.typing import T_State
from nonebot.adapters.cqhttp import Message, Bot, Event # 这两个没用的别删
from nonebot.adapters.cqhttp.message import MessageSegment
import requests
from nonebot.permission import *
from nonebot.rule import to_me
from aiocqhttp.exceptions import Error as CQHttpError
# Matcher: fires on any message containing the keyword '随机摄影'
# ("random photography").
sheying = on_keyword({'随机摄影'})

@sheying.handle()
async def main(bot: Bot, event: Event, state: T_State):
    # Fetch the CQ image code and send it back on the originating channel.
    msg = await downloads()
    try:
        await sheying.send(message=Message(msg))
    except CQHttpError:
        # Best-effort send: delivery failures from go-cqhttp are ignored.
        pass
async def downloads():
    """Fetch a random photo URL from the remote API and wrap it as a CQ image code.

    :return: a ``[CQ:image,file=...]`` string ready to embed in a Message.

    NOTE(review): ``requests.get`` is a blocking call inside an ``async``
    function and stalls the event loop for the duration of the HTTP
    request; an async HTTP client would be preferable. A timeout is added
    so a hung server cannot block the bot indefinitely.
    """
    url = "https://yanghanwen.xyz/tu/ren.php"
    # timeout added: requests has no default timeout and would hang forever
    # on an unresponsive server.
    resp = requests.get(url, timeout=10).json()
    url_ing = resp['data']
    xians = f"[CQ:image,file={url_ing}]"
    return xians

# await xians.send("正在爬取图片,请稍后……")
# await xians.send(MessageSegment.at(id) + xians + "精选摄影")
| 28.870968
| 68
| 0.689385
| 119
| 895
| 5.168067
| 0.529412
| 0.107317
| 0.061789
| 0.081301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 895
| 31
| 69
| 28.870968
| 0.850559
| 0.109497
| 0
| 0
| 0
| 0
| 0.086387
| 0.032723
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.045455
| 0.363636
| 0
| 0.409091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
bccb37cf2799cc964344db7c5cf679594dae2889
| 2,252
|
py
|
Python
|
tests/test_api.py
|
ines/spacy-js
|
5b7a86cb0d1099285e01252f7e1d44a36ad9a07f
|
[
"MIT"
] | 141
|
2018-10-27T17:18:54.000Z
|
2022-03-31T11:08:02.000Z
|
tests/test_api.py
|
Fabulabs/spacy-js
|
c7a34298203d26b25f9eb1f6b9eb875faa33d144
|
[
"MIT"
] | 16
|
2018-10-27T21:44:36.000Z
|
2022-01-22T03:01:54.000Z
|
tests/test_api.py
|
Fabulabs/spacy-js
|
c7a34298203d26b25f9eb1f6b9eb875faa33d144
|
[
"MIT"
] | 22
|
2019-01-12T16:38:20.000Z
|
2022-03-14T19:11:38.000Z
|
# coding: utf8
from __future__ import unicode_literals
import pytest
import spacy
import json
from api.server import parse, doc2json, load_model
@pytest.fixture(scope="session")
def model():
    """Name of the spaCy model shared by every test in the session."""
    model_name = "en_core_web_sm"
    return model_name
@pytest.fixture(scope="session")
def text():
    """Two-sentence sample text containing one named entity (Facebook)."""
    sample = "This is a sentence about Facebook. This is another one."
    return sample
@pytest.fixture(scope="session")
def nlp(model):
    """spaCy pipeline loaded once per session from the model fixture."""
    pipeline = spacy.load(model)
    return pipeline
@pytest.fixture(scope="session")
def doc(nlp, text):
    """Doc produced by running the session pipeline over the sample text."""
    processed = nlp(text)
    return processed
def test_server_parse(model, text, doc):
    """parse() output must serialize identically to doc2json() on the same doc."""
    load_model(model)
    via_server = parse(model, text)
    via_direct = doc2json(doc, model)
    # Compare canonical JSON renderings so key order cannot matter.
    assert json.dumps(via_server, sort_keys=True) == json.dumps(via_direct, sort_keys=True)
def test_doc2json_doc_tokens(doc, model):
    """doc2json must echo the model name plus doc- and token-level fields."""
    payload = doc2json(doc, model)
    assert payload["model"] == model
    assert payload["doc"]["text"] == doc.text
    assert payload["doc"]["text_with_ws"] == doc.text_with_ws
    assert payload["doc"]["is_tagged"]
    assert payload["doc"]["is_parsed"]
    assert payload["doc"]["is_sentenced"]
    assert len(payload["tokens"]) == len(doc)
    assert payload["tokens"][0]["text"] == doc[0].text
    assert payload["tokens"][0]["head"] == doc[0].head.i
def test_doc2json_doc_ents(doc, model):
    """doc2json must serialize entity spans with start/end/label."""
    payload = doc2json(doc, model)
    gold_ents = list(doc.ents)
    assert "ents" in payload
    assert len(payload["ents"]) == len(gold_ents)
    assert len(payload["ents"]) >= 1
    first = payload["ents"][0]
    assert first["start"] == gold_ents[0].start
    assert first["end"] == gold_ents[0].end
    assert first["label"] == gold_ents[0].label_
def test_doc2json_doc_sents(doc, model):
    """doc2json must serialize sentence spans with start/end offsets."""
    payload = doc2json(doc, model)
    gold_sents = list(doc.sents)
    assert "sents" in payload
    assert len(payload["sents"]) == len(gold_sents)
    assert len(payload["sents"]) >= 1
    first = payload["sents"][0]
    assert first["start"] == gold_sents[0].start
    assert first["end"] == gold_sents[0].end
def test_doc2json_doc_noun_chunks(doc, model):
    """doc2json must serialize noun chunks with start/end offsets."""
    payload = doc2json(doc, model)
    gold_chunks = list(doc.noun_chunks)
    assert "noun_chunks" in payload
    assert len(payload["noun_chunks"]) == len(gold_chunks)
    assert len(payload["noun_chunks"]) >= 1
    first = payload["noun_chunks"][0]
    assert first["start"] == gold_chunks[0].start
    assert first["end"] == gold_chunks[0].end
| 27.463415
| 68
| 0.654085
| 330
| 2,252
| 4.321212
| 0.193939
| 0.105189
| 0.063815
| 0.070126
| 0.28892
| 0.130435
| 0.051893
| 0
| 0
| 0
| 0
| 0.017214
| 0.174512
| 2,252
| 81
| 69
| 27.802469
| 0.749866
| 0.005329
| 0
| 0.133333
| 0
| 0
| 0.143878
| 0
| 0
| 0
| 0
| 0
| 0.433333
| 1
| 0.15
| false
| 0
| 0.083333
| 0.066667
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcd088f1e5c34ccfa8be8350d7cb0a6ebc06a38b
| 4,979
|
py
|
Python
|
HealthNet/prescriptions/views.py
|
jimga150/HealthNet
|
84e55302b02221ae6e93640904af837fdfe09a83
|
[
"MIT"
] | null | null | null |
HealthNet/prescriptions/views.py
|
jimga150/HealthNet
|
84e55302b02221ae6e93640904af837fdfe09a83
|
[
"MIT"
] | null | null | null |
HealthNet/prescriptions/views.py
|
jimga150/HealthNet
|
84e55302b02221ae6e93640904af837fdfe09a83
|
[
"MIT"
] | null | null | null |
from django.shortcuts import redirect
from .forms import PrescriptionForm
from core.views import is_doctor, is_nurse, is_admin, is_patient
from core.models import *
from .models import Prescription
from django.contrib.auth.decorators import login_required, user_passes_test
from django.utils import timezone
from django.shortcuts import render
from django.core.urlresolvers import reverse
def not_admin(user):
    """
    :param user: The User in question
    :return: True when the user is anything but an Admin
    """
    user_is_admin = is_admin(user)
    return not user_is_admin
def is_doctor_or_nurse(user):
    """
    :param user: The User in question
    :return: True when the user is a Doctor or a Nurse
    """
    # Short-circuit exactly like `is_doctor(user) or is_nurse(user)`,
    # preserving whichever truthy value the role check returns.
    result = is_doctor(user)
    if not result:
        result = is_nurse(user)
    return result
@login_required
@user_passes_test(is_doctor)
def new_prescription(request):
    """
    Page for the form a doctor fills out to prescribe a drug.

    :param request: the request with possible form submission
    :return: Prescription form or redirect to the listing page (below)
    """
    if request.method == 'POST':
        prescription_form = PrescriptionForm(data=request.POST)
        validity = prescription_form.is_valid()
        if validity:
            # Fill in the fields the form does not provide before saving.
            prescription = prescription_form.save(commit=False)
            prescription.date_prescribed = timezone.now()
            prescription.doctor = Doctor.objects.all().get(user=request.user)
            prescription.save()
            # Audit-trail entry for the newly filled prescription.
            log = Log.objects.create_Log(request.user, request.user.username, timezone.now(),
                                         "Prescription filled out")
            log.save()
        else:
            print("Error")
            print(prescription_form.errors)
        # 'submit_singular' returns to the listing page on success;
        # 'submit_another' re-displays a blank form for another entry.
        # On validation failure the bound form (with errors) is re-rendered.
        if 'submit_singular' in request.POST and validity:
            return redirect('prescriptions')
        elif 'submit_another' in request.POST:
            prescription_form = PrescriptionForm()
    else:
        # Initial GET: show an empty form.
        prescription_form = PrescriptionForm()
    context = {"prescription_form": prescription_form}
    return render(request, 'prescriptions/makenew.html', context)
def get_prescription_list_for(cpatient):
    """
    Generic getter for a specific patient's prescription list.

    :param cpatient: Patient to fetch list for
    :return: context of Prescription list
    """
    prescription_qs = Prescription.objects.all().filter(patient=cpatient)
    # Human-readable rate unit for each prescription (label of Time_units).
    rate_units = [
        str(dict(p.TIME_CHOICES)[p.Time_units])
        for p in prescription_qs.iterator()
    ]
    return {
        "Labels": ["Doctor", "Drug", "Dosage", "Rate"],
        "Name": str(cpatient),
        "Prescriptions": zip(prescription_qs, rate_units),
    }
@login_required
@user_passes_test(not_admin)
def prescriptions(request):
    """
    Lists either all patients in the hospital with links to their prescription lists, or the prescriptions applied to a
    single defined patient.

    :param request: The request sent in, used to identify the viewer's role
    :return: List page rendering
    """
    context = {}
    if is_doctor(request.user) or is_nurse(request.user):
        # Staff see the full patient roster with a prescription count each.
        context["Labels"] = ["Name", "Prescriptions"]
        patients = Patient.objects.all()
        prescription_nums = []
        for pat in patients.iterator():
            prescription_nums.append(Prescription.objects.filter(patient=pat).count())
        context["Patients"] = zip(patients, prescription_nums)
    elif is_patient(request.user):
        # Patients only ever see their own prescription list.
        cpatient = Patient.objects.get(user=request.user)
        context = get_prescription_list_for(cpatient)
    # Bug fix: this flag used to be assigned twice in a row; once is enough.
    context["is_doctor"] = is_doctor(request.user)
    return render(request, 'prescriptions/list.html', context)
@login_required
@user_passes_test(is_doctor_or_nurse)
def prescriptions_list(request, patient_id):
    """
    Page that doctors and nurses are sent to when accessing a single patient's prescription list.

    :param request: The request sent in, used for the is_doctor flag
    :param patient_id: ID of the patient who's being listed
    :return: List page rendering
    """
    target_patient = Patient.objects.get(pk=patient_id)
    context = get_prescription_list_for(target_patient)
    # Doctors (unlike nurses) get edit/delete controls in the template.
    context["is_doctor"] = is_doctor(request.user)
    return render(request, 'prescriptions/list.html', context)
@login_required
@user_passes_test(is_doctor)
def delete_prescription(request, prescription_id):
    """
    Page for confirming/deleting a single prescription.

    :param request: The request sent in; POST confirms the deletion
    :param prescription_id: ID number of the prescription in question
    :return: Redirect or confirmation page
    """
    prescription = Prescription.objects.get(pk=prescription_id)
    # Remember the owning patient so we can return to their list afterwards.
    patient_id = prescription.patient.id
    if request.method == 'POST':
        # POST means the doctor confirmed on the confirmation page.
        prescription.delete()
        return redirect(reverse('list prescriptions for patient', kwargs={'patient_id': patient_id}))
    context = {"Prescription": prescription, 'patient_id': patient_id}
    return render(request, 'prescriptions/delete.html', context)
| 33.193333
| 119
| 0.69753
| 612
| 4,979
| 5.537582
| 0.236928
| 0.030688
| 0.025081
| 0.033933
| 0.238713
| 0.179994
| 0.179994
| 0.179994
| 0.167896
| 0.156388
| 0
| 0
| 0.207672
| 4,979
| 149
| 120
| 33.416107
| 0.859062
| 0.228962
| 0
| 0.240506
| 0
| 0
| 0.096651
| 0.026409
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088608
| false
| 0.063291
| 0.113924
| 0
| 0.316456
| 0.025316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
bcd88cb9aee8377371dcb96cf615ef4e2ec10580
| 4,113
|
py
|
Python
|
exercises/level_0/stringing.py
|
eliranM98/python_course
|
d9431dd6c0f27fca8ca052cc2a821ed0b883136c
|
[
"MIT"
] | 6
|
2019-03-29T06:14:53.000Z
|
2021-10-15T23:42:36.000Z
|
exercises/level_0/stringing.py
|
eliranM98/python_course
|
d9431dd6c0f27fca8ca052cc2a821ed0b883136c
|
[
"MIT"
] | 4
|
2019-09-06T10:03:40.000Z
|
2022-03-11T23:30:55.000Z
|
exercises/level_0/stringing.py
|
eliranM98/python_course
|
d9431dd6c0f27fca8ca052cc2a821ed0b883136c
|
[
"MIT"
] | 12
|
2019-06-20T19:34:52.000Z
|
2021-10-15T23:42:39.000Z
|
# Teaching script: a tour of Python's built-in string operations, applied to a
# book-jacket blurb for Les Miserables.  The two {} slots in the text are
# filled in later via str.format.
text = '''
Victor Hugo's ({}) tale of injustice, heroism and love follows the fortunes of Jean Valjean, an escaped convict determined to put his criminal past behind him. But his attempts to become a respected member of the community are constantly put under threat: by his own conscience, when, owing to a case of mistaken identity, another man is arrested in his place; and by the relentless investigations of the dogged Inspector Javert. It is not simply for himself that Valjean must stay free, however, for he has sworn to protect the baby daughter of Fantine, driven to prostitution by poverty.
Norman Denny's ({}) lively English translation is accompanied by an introduction discussing Hugo's political and artistic aims in writing Les Miserables.
Victor Hugo (1802-85) wrote volumes of criticism, dramas, satirical verse and political journalism but is best remembered for his novels, especially Notre-Dame de Paris (also known as The Hunchback of Notre-Dame) and Les Miserables, which was adapted into one of the most successful musicals of all time.
'All human life is here'
Cameron Mackintosh, producer of the musical Les Miserables
'One of the half-dozen greatest novels of the world'
Upton Sinclair
'A great writer - inventive, witty, sly, innovatory'
A. S. Byatt, author of Possession
'''
# Sample values used throughout the exercise.
name = 'Victor'
word1 = 'writer'
word2 = 'witty'
numbers = "0123456789"
small_letters = 'abcdefghijklmnopqrstuvwxyz'
big_letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# Searching: find / count / startswith / endswith.
name_index = text.find(name)
# Slice out the name plus the three characters that follow it.
name_plus3 = text[name_index: name_index+len(name)+3]
# find() restricted to a range: only the first 100 chars / the second half.
word1_index = text.find(word1, 0, 100)
word2_index = text.find(word2, int(len(text)/2), len(text))
count_characters = text.count('of')
is_text_starts_with_name = text.startswith(name)
is_text_ends_with_name = text.endswith(name)
# Formatting: fill the two {} placeholders in the blurb.
text = text.format('1822-95', '1807-63')
# Splitting and joining with different separators.
words = text.split(' ')
text1 = ''.join(words)
text2 = ','.join(words)
text3 = '_'.join(words)
text4 = ' '.join(words)
# Replacement and case transformation.
text5 = text.replace('of', '@🐔')
text6 = text.capitalize()
text7 = text.replace('a', '')
text8 = text.strip()
upper_name = name.upper()
lower_name = name.lower()
# Predicates: isupper / islower / isnumeric / isdigit.
is_name_upper = name.isupper()
is_name_lower = name.islower()
is_big_letters_upper = big_letters.isupper()
is_small_letters_lower = small_letters.islower()
stringed_integer = '90'.isnumeric()
# Note: isnumeric() is False for '90.5' — the dot is not a numeric character.
stringed_float = '90.5'.isnumeric()
# Conversions between str and the numeric types.
converted_int = int('90')
converted_float = float('90.5')
converted_string = str(183)
is_digit = converted_string[1].isdigit()
# Indexing and slicing: first/last char, interior, and stride slices.
edges = small_letters[0] + big_letters[-1]
body = numbers[1:-1]
evens = numbers[::2]
odds = numbers[1::2]
# Dump every intermediate result for inspection.
print('name', name)
print('word1', word1)
print('word2', word2)
print('numbers', numbers)
print('small_letters', small_letters)
print('big_letters', big_letters)
print('name_index', name_index)
print('name_plus3', name_plus3)
print('word1_index', word1_index)
print('word2_index', word2_index)
print('count_characters -> \'of\' in the text', count_characters)
print('is_text_starts_with_name', is_text_starts_with_name)
print('is_text_ends_with_name', is_text_ends_with_name)
print('\n\n\n\n\n', 'text', text, '\n\n\n\n\n')
print('\n\n\n\n\n', 'words', words, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text1', text1, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text2', text2, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text3', text3, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text4', text4, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text5', text5, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text6', text6, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text7', text7, '\n\n\n\n\n')
print('\n\n\n\n\n', 'text8', text8, '\n\n\n\n\n')
print('upper_name', upper_name)
print('lower_name', lower_name)
print('is_name_upper', is_name_upper)
print('is_name_lower', is_name_lower)
print('is_big_letters_upper', is_big_letters_upper)
print('is_small_letters_lower', is_small_letters_lower)
print('stringed_integer', stringed_integer)
print('stringed_float', stringed_float)
print('converted_int', converted_int)
print('converted_float', converted_float)
print('converted_string', converted_string)
print('is_digit', is_digit)
print('edges', edges)
print('body', body)
print('evens', evens)
print('odds', odds)
| 41.545455
| 590
| 0.735959
| 680
| 4,113
| 4.295588
| 0.310294
| 0.054776
| 0.061623
| 0.054776
| 0.093461
| 0.068127
| 0.046217
| 0.046217
| 0.046217
| 0.046217
| 0
| 0.026237
| 0.110382
| 4,113
| 98
| 591
| 41.969388
| 0.771796
| 0
| 0
| 0
| 0
| 0.033708
| 0.490396
| 0.029176
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.438202
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
bce0bfd9222f594d713d4743ed32c26bb4279c4c
| 1,483
|
py
|
Python
|
check_perm.py
|
codecakes/random_games
|
1e670021ec97a196726e937e658878dc63ba9d34
|
[
"MIT"
] | null | null | null |
check_perm.py
|
codecakes/random_games
|
1e670021ec97a196726e937e658878dc63ba9d34
|
[
"MIT"
] | null | null | null |
check_perm.py
|
codecakes/random_games
|
1e670021ec97a196726e937e658878dc63ba9d34
|
[
"MIT"
] | null | null | null |
"""
PermCheck
Check whether array A is a permutation.
https://codility.com/demo/results/demoANZ7M2-GFU/
Task description
A non-empty zero-indexed array A consisting of N integers is given.
A permutation is a sequence containing each element from 1 to N once, and only once.
For example, array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
A[3] = 2
is a permutation, but array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
is not a permutation, because value 2 is missing.
The goal is to check whether array A is a permutation.
Write a function:
def solution(A)
that, given a zero-indexed array A, returns 1 if array A is a permutation and 0 if it is not.
For example, given array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
A[3] = 2
the function should return 1.
Given array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
the function should return 0.
Assume that:
N is an integer within the range [1..100,000];
each element of array A is an integer within the range [1..1,000,000,000].
Complexity:
expected worst-case time complexity is O(N);
expected worst-case space complexity is O(N), beyond input storage (not counting the storage required for input arguments).
Elements of input arrays can be modified.
"""
def solution(A):
    """Return 1 if A is a permutation of 1..len(A), otherwise 0.

    A permutation contains each integer from 1 to N exactly once.
    O(N) time, O(N) extra space.

    The original sum-based check was written for Python 2.7 (`/` was
    integer division) and could be fooled by inputs outside the stated
    [1..1e9] range (e.g. [0, 2, 4] sums to 6 like [1, 2, 3]); comparing
    sets against range(1, N+1) is exact for any integers.
    """
    N = len(A)
    return 1 if set(A) == set(range(1, N + 1)) else 0
| 27.981132
| 123
| 0.662171
| 285
| 1,483
| 3.424561
| 0.329825
| 0.061475
| 0.032787
| 0.057377
| 0.25
| 0.229508
| 0.229508
| 0.110656
| 0.110656
| 0.110656
| 0
| 0.054916
| 0.238705
| 1,483
| 53
| 124
| 27.981132
| 0.809566
| 0.864464
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bce207bdb62870e146b1e56a1b168c691c0515ac
| 4,273
|
py
|
Python
|
utils/scene_bounding_box.py
|
davidemarelli/sfm_flow
|
7a96d8309cc01b8499347ba0cae882923d82bbcc
|
[
"MIT"
] | 8
|
2020-10-27T12:52:17.000Z
|
2022-03-30T04:15:37.000Z
|
utils/scene_bounding_box.py
|
ElsevierSoftwareX/SOFTX_2020_51
|
b240a113c91405fac60444a6e56e87e3cf17a27b
|
[
"MIT"
] | 1
|
2020-11-09T01:56:04.000Z
|
2020-11-24T15:58:26.000Z
|
utils/scene_bounding_box.py
|
davidemarelli/sfm_flow
|
7a96d8309cc01b8499347ba0cae882923d82bbcc
|
[
"MIT"
] | 2
|
2021-12-02T10:04:39.000Z
|
2022-03-28T07:54:07.000Z
|
import logging
from typing import Tuple
import bpy
from mathutils import Vector
from .object import get_objs
logger = logging.getLogger(__name__)
class SceneBoundingBox():
    """Axis-aligned bounding box enclosing every scene object except the excluded ones."""

    # ------------------------------------------------------------ properties

    @property
    def width(self):
        """Bounding box extent along the X axis."""
        return self.x_max - self.x_min

    @property
    def depth(self):
        """Bounding box extent along the Y axis."""
        return self.y_max - self.y_min

    @property
    def height(self):
        """Bounding box extent along the Z axis."""
        return self.z_max - self.z_min

    @property
    def floor_center(self):
        """Box center projected onto the lower (minimum Z) plane."""
        return Vector((self.center[0], self.center[1], self.z_min))

    # ------------------------------------------------------------ constructor

    def __init__(self, scene: bpy.types.Scene,
                 exclude_collections: Tuple[str] = ("SfM_Environment", "SfM_Reconstructions")):
        self.scene = scene
        self.exclude_collections = exclude_collections
        # Start from an "empty" box: mins at +inf, maxs at -inf, center at origin.
        self.center = Vector()        # type: Vector
        self.x_min = float("inf")     # type: float
        self.x_max = float("-inf")    # type: float
        self.y_min = float("inf")     # type: float
        self.y_max = float("-inf")    # type: float
        self.z_min = float("inf")     # type: float
        self.z_max = float("-inf")    # type: float
        self.compute()

    # ------------------------------------------------------------ methods

    def compute(self):
        """Scan all candidate mesh objects and update extents and center."""
        objs = get_objs(self.scene, exclude_collections=self.exclude_collections, mesh_only=True)
        logger.debug("Found %i objects in scene %s", len(objs), self.scene.name)
        for obj in objs:
            # Each object contributes the 8 corners of its local bounding box,
            # transformed into world space.
            for corner in obj.bound_box:
                point = obj.matrix_world @ Vector(corner)
                self.x_min = min(self.x_min, point[0])
                self.x_max = max(self.x_max, point[0])
                self.y_min = min(self.y_min, point[1])
                self.y_max = max(self.y_max, point[1])
                self.z_min = min(self.z_min, point[2])
                self.z_max = max(self.z_max, point[2])
        if objs:
            # Midpoint of the extents on each axis.
            self.center = Vector(((self.x_max + self.x_min) / 2,
                                  (self.y_max + self.y_min) / 2,
                                  (self.z_max + self.z_min) / 2))
        logger.debug(str(self))

    def get_min_vector(self):
        """Vector of the minimum coordinates on each axis."""
        return Vector((self.x_min, self.y_min, self.z_min))

    def get_max_vector(self):
        """Vector of the maximum coordinates on each axis."""
        return Vector((self.x_max, self.y_max, self.z_max))

    # ------------------------------------------------------------ builtins

    def __str__(self):
        return "Scene bbox values: X=({:.3f}, {:.3f}), Y=({:.3f}, {:.3f}), Z=({:.3f}, {:.3f}), Center={}".format(
            self.x_min, self.x_max, self.y_min, self.y_max, self.z_min, self.z_max, self.center)
| 39.934579
| 113
| 0.388018
| 402
| 4,273
| 3.945274
| 0.226368
| 0.047289
| 0.040353
| 0.064313
| 0.250315
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0.00545
| 0.227007
| 4,273
| 106
| 114
| 40.311321
| 0.47472
| 0.300725
| 0
| 0.071429
| 0
| 0.017857
| 0.067402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.160714
| false
| 0
| 0.089286
| 0.017857
| 0.392857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bce8b5c80fccdda525f4313d1b8dac7df83862d2
| 3,765
|
py
|
Python
|
scraper-code/myanimelist/base.py
|
XueAlfred/MALAnalysis
|
630d578b30f7540769774e1e4ee072d9775bf4bf
|
[
"MIT"
] | 15
|
2015-01-24T10:52:42.000Z
|
2021-08-09T10:23:58.000Z
|
scraper-code/myanimelist/base.py
|
XueAlfred/MALAnalysis
|
630d578b30f7540769774e1e4ee072d9775bf4bf
|
[
"MIT"
] | 10
|
2015-01-24T10:51:18.000Z
|
2018-09-05T00:17:03.000Z
|
scraper-code/myanimelist/base.py
|
XueAlfred/MALAnalysis
|
630d578b30f7540769774e1e4ee072d9775bf4bf
|
[
"MIT"
] | 18
|
2015-01-24T11:29:38.000Z
|
2021-12-04T02:41:09.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import abc
import bs4
import functools
import utilities
class Error(Exception):
    """Base exception class that takes a message to display upon raising.
    """
    def __init__(self, message=None):
        """Creates an instance of Error.

        :type message: str
        :param message: A message to display when raising the exception.
        """
        super(Error, self).__init__()
        self.message = message

    def __str__(self):
        # Render nothing when no message was supplied (Python 2 unicode API).
        if self.message is None:
            return u""
        return unicode(self.message)
class MalformedPageError(Error):
    """Indicates that a page on MAL has broken markup in some way.
    """
    def __init__(self, id, html, message=None):
        super(MalformedPageError, self).__init__(message=message)
        # Normalise both fields to unicode (this module targets Python 2).
        if isinstance(id, unicode):
            self.id = id
        else:
            self.id = str(id).decode(u'utf-8')
        if isinstance(html, unicode):
            self.html = html
        else:
            self.html = str(html).decode(u'utf-8')

    def __str__(self):
        lines = [
            super(MalformedPageError, self).__str__(),
            "ID: " + self.id,
            "HTML: " + self.html,
        ]
        return "\n".join(lines).encode(u'utf-8')
class InvalidBaseError(Error):
    """Indicates that the particular resource instance requested does not exist on MAL.
    """
    def __init__(self, id, message=None):
        super(InvalidBaseError, self).__init__(message=message)
        self.id = id

    def __str__(self):
        lines = [
            super(InvalidBaseError, self).__str__(),
            "ID: " + unicode(self.id),
        ]
        return "\n".join(lines)
def loadable(func_name):
    """Decorator factory for getters that require a load() upon first access.

    :type func_name: function
    :param func_name: class method that requires that load() be called if the
        class's _attribute value is None
    :rtype: function
    :return: the decorated class method.
    """
    def inner(func):
        # The cached slot follows the "_<getter name>" convention.
        cache_attr = '_' + func.__name__

        @functools.wraps(func)
        def _decorator(self, *args, **kwargs):
            if getattr(self, cache_attr) is None:
                # Cache miss: invoke the named loader to populate the slot.
                loader = getattr(self, func_name)
                loader()
            return func(self, *args, **kwargs)

        return _decorator

    return inner
class Base(object):
    """Abstract base class for MAL resources. Provides autoloading, auto-setting functionality for other MAL objects.
    """
    __metaclass__ = abc.ABCMeta

    """Attribute name for primary reference key to this object.
    When an attribute by the name given by _id_attribute is passed into set(), set() doesn't prepend an underscore for load()ing.
    """
    _id_attribute = "id"

    def __repr__(self):
        parts = [
            "<",
            self.__class__.__name__,
            " ",
            self._id_attribute,
            ": ",
            unicode(getattr(self, self._id_attribute)),
            ">",
        ]
        return u"".join(parts)

    def __hash__(self):
        # Hash on class name + primary key so equal objects hash equally.
        key = '-'.join([self.__class__.__name__, unicode(getattr(self, self._id_attribute))])
        return hash(key)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return getattr(self, self._id_attribute) == getattr(other, other._id_attribute)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __init__(self, session):
        """Create an instance of Base.

        :type session: :class:`myanimelist.session.Session`
        :param session: A valid MAL session.
        """
        self.session = session

    @abc.abstractmethod
    def load(self):
        """A callback to run before any @loadable attributes are returned.
        """
        pass

    def set(self, attr_dict):
        """Sets attributes of this user object.

        :type attr_dict: dict
        :param attr_dict: Parameters to set, with attribute keys.
        :rtype: :class:`.Base`
        :return: The current object.
        """
        for key, value in attr_dict.items():
            if key == self._id_attribute:
                # The primary key keeps its public name (no underscore prefix).
                setattr(self, self._id_attribute, value)
            else:
                setattr(self, u"_" + key, value)
        return self
| 27.683824
| 127
| 0.662151
| 493
| 3,765
| 4.791075
| 0.298174
| 0.033023
| 0.038103
| 0.032176
| 0.063506
| 0.052498
| 0.052498
| 0
| 0
| 0
| 0
| 0.001691
| 0.214608
| 3,765
| 136
| 128
| 27.683824
| 0.797092
| 0.290571
| 0
| 0.155844
| 0
| 0
| 0.018143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.207792
| false
| 0.012987
| 0.051948
| 0.090909
| 0.480519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcf633a886ab43d9c3c7c35185345d3f776c81e3
| 4,899
|
py
|
Python
|
src/promnesia/sources/telegram.py
|
halhenke/promnesia
|
03f46b7e0740790ef091e6f48d0ac2e6bf05bcb7
|
[
"MIT"
] | 1,327
|
2019-11-02T20:10:38.000Z
|
2022-03-29T16:58:36.000Z
|
src/promnesia/sources/telegram.py
|
halhenke/promnesia
|
03f46b7e0740790ef091e6f48d0ac2e6bf05bcb7
|
[
"MIT"
] | 157
|
2019-09-06T11:16:40.000Z
|
2022-03-27T20:01:52.000Z
|
src/promnesia/sources/telegram.py
|
halhenke/promnesia
|
03f46b7e0740790ef091e6f48d0ac2e6bf05bcb7
|
[
"MIT"
] | 60
|
2020-06-08T22:12:24.000Z
|
2022-03-22T16:57:22.000Z
|
'''
Uses [[https://github.com/fabianonline/telegram_backup#readme][telegram_backup]] database for messages data
'''
from pathlib import Path
from textwrap import dedent
from typing import Optional, Union, TypeVar
from urllib.parse import unquote # TODO mm, make it easier to rememember to use...
from ..common import PathIsh, Visit, get_logger, Loc, extract_urls, from_epoch, Results, echain
# TODO potentially, belongs to my. package
# TODO kython?
T = TypeVar("T")
def unwrap(res: Union[T, Exception]) -> T:
    """Return *res* unchanged, or raise it if it is an Exception instance."""
    if not isinstance(res, Exception):
        return res
    raise res
# TODO move to common?
def dataset_readonly(db: Path):
    """Open *db* as a read-only `dataset` connection (sqlite immutable mode)."""
    import dataset  # type: ignore
    import sqlite3

    # see https://github.com/pudo/dataset/issues/136#issuecomment-128693122
    def creator():
        return sqlite3.connect(f'file:{db}?immutable=1', uri=True)

    return dataset.connect('sqlite:///' , engine_kwargs={'creator': creator})
def index(database: PathIsh, *, http_only: bool=None) -> Results:
    """
    :param database:
        the path of the sqlite generated by the _telegram_backup_ java program
    :param http_only:
        when true, do not collect IP-addresses and `python.py` strings
    """
    logger = get_logger()
    path = Path(database)
    assert path.is_file(), path # TODO could check is_file inside `dataset_readonly()`
    def make_query(text_query: str):
        # Optionally restrict to messages that carry media or an http link.
        extra_criteria = "AND (M.has_media == 1 OR text LIKE '%http%')" if http_only else ""
        return dedent(
            f"""
            WITH entities AS (
            SELECT 'dialog' as type
            , id
            , coalesce(username, id) as handle
            , coalesce(first_name || " " || last_name
            , username
            , id
            ) as display_name FROM users
            UNION
            SELECT 'group' as type
            , id
            , id as handle
            , coalesce(name, id) as display_name FROM chats
            )
            SELECT src.display_name AS chatname
            , src.handle  AS chat
            , snd.display_name AS sender
            , M.time AS time
            , {text_query} AS text
            , M.id AS mid
            FROM messages AS M
            /* chat types are 'dialog' (1-1), 'group' and 'supergroup' */
            /* this is abit hacky way to handle all groups in one go */
            LEFT JOIN entities AS src ON M.source_id = src.id AND src.type = (CASE M.source_type WHEN 'supergroup' THEN 'group' ELSE M.source_type END)
            LEFT JOIN entities AS snd ON M.sender_id = snd.id AND snd.type = 'dialog'
            WHERE
            M.message_type NOT IN ('service_message', 'empty_message')
            {extra_criteria}
            ORDER BY time;
            """)
    # TODO context manager?
    with dataset_readonly(path) as db:
        # First pass: the plain message text column.
        # TODO yield error if chatname or chat or smth else is null?
        for row in db.query(make_query('M.text')):
            try:
                yield from _handle_row(row)
            except Exception as ex:
                yield echain(RuntimeError(f'While handling {row}'), ex)
                # , None, sys.exc_info()[2]
                # TODO hmm. traceback isn't preserved; wonder if that's because it's too heavy to attach to every single exception object..
        # Second pass: webpage-preview descriptions, only available in newer
        # backups that have the 'json' column.
        # old (also 'stable') version doesn't have 'json' column yet...
        if 'json' in db['messages'].columns:
            for row in db.query(make_query("json_extract(json, '$.media.webpage.description')")):
                try:
                    yield from _handle_row(row)
                except Exception as ex:
                    yield echain(RuntimeError(f'While handling {row}'), ex)
def _handle_row(row) -> Results:
    """Yield a Visit for every URL found in one message row."""
    text = row['text']
    if text is None:
        return
    urls = extract_urls(text)
    if len(urls) == 0:
        return
    dt = from_epoch(row['time'])
    mid: str = unwrap(row['mid'])
    # TODO perhaps we could be defensive with null sender/chat etc and still emit the Visit
    sender: str = unwrap(row['sender'])
    chatname: str = unwrap(row['chatname'])
    chat: str = unwrap(row['chat'])
    # https://www.reddit.com/r/Telegram/comments/6ufwi3/link_to_a_specific_message_in_a_channel_possible/
    # hmm, only seems to work on mobile app, but better than nothing...
    in_context = f'https://t.me/{chat}/{mid}'
    for url in urls:
        visit = Visit(
            url=unquote(url),
            dt=dt,
            context=f"{sender}: {text}",
            locator=Loc.make(
                title=f"chat with {chatname}",
                href=in_context,
            ),
        )
        yield visit
| 37.684615
| 154
| 0.560931
| 606
| 4,899
| 4.437294
| 0.40429
| 0.007438
| 0.017851
| 0.013388
| 0.095203
| 0.081071
| 0.081071
| 0.063221
| 0.063221
| 0.063221
| 0
| 0.006813
| 0.340886
| 4,899
| 129
| 155
| 37.976744
| 0.825952
| 0.221678
| 0
| 0.146067
| 0
| 0.022472
| 0.464523
| 0.013553
| 0
| 0
| 0
| 0.015504
| 0.011236
| 1
| 0.05618
| false
| 0
| 0.078652
| 0
| 0.191011
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcf9e42ce187d88ea3d733bded3e343188bcd463
| 10,196
|
py
|
Python
|
daproli/transformer.py
|
ermshaua/daproli
|
c1f7aeec431d9c60ae06eeac23455c1a03bc82cf
|
[
"BSD-3-Clause"
] | null | null | null |
daproli/transformer.py
|
ermshaua/daproli
|
c1f7aeec431d9c60ae06eeac23455c1a03bc82cf
|
[
"BSD-3-Clause"
] | null | null | null |
daproli/transformer.py
|
ermshaua/daproli
|
c1f7aeec431d9c60ae06eeac23455c1a03bc82cf
|
[
"BSD-3-Clause"
] | null | null | null |
from joblib import Parallel, delayed
from tqdm import tqdm
from .processing import map, filter, split, expand, combine, join
from .manipulation import windowed, flatten
class BaseTransformer:
    '''
    The BaseTransformer defines a generic data transformation pattern that
    can be implemented with a number of data processing concepts.
    '''

    def transform(self, data, *args, **kwargs):
        # Concrete transformers must override this method.
        raise NotImplementedError()
class Mapper(BaseTransformer):
    def __init__(self, func, ret_type=None, expand_args=True, n_jobs=1, verbose=0, **kwargs):
        '''
        dp.Mapper is the transformer counterpart of dp.map: it applies a
        mapping function to every data item on transform().

        Parameters
        -----------
        :param func: the mapping function
        :param ret_type: if provided the used return type, otherwise ret_type(data)
        :param expand_args: true if args should be expanded, false otherwise
        :param n_jobs: amount of used threads/processes
        :param verbose: verbosity level for tqdm / joblib
        :param kwargs: additional arguments for joblib.Parallel, e.g. backend='loky'
        '''
        # Capture the configuration for later transform() calls.
        self.func = func
        self.ret_type = ret_type
        self.expand_args = expand_args
        self.n_jobs = n_jobs
        self.verbose = verbose
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        # Delegate to dp.map with the stored configuration.
        return map(self.func, data, self.ret_type,
                   expand_args=self.expand_args,
                   n_jobs=self.n_jobs,
                   verbose=self.verbose,
                   **self.kwargs)
class Filter(BaseTransformer):
    def __init__(self, pred, ret_type=None, expand_args=True, n_jobs=1, verbose=0, **kwargs):
        '''
        dp.Filter is the transformer counterpart of dp.filter: it keeps the
        data items that satisfy the predicate on transform().

        Parameters
        -----------
        :param pred: the filter predicate
        :param ret_type: if provided the used return type, otherwise ret_type(data)
        :param expand_args: true if args should be expanded, false otherwise
        :param n_jobs: amount of used threads/processes
        :param verbose: verbosity level for tqdm / joblib
        :param kwargs: additional arguments for joblib.Parallel, e.g. backend='loky'
        '''
        # Capture the configuration for later transform() calls.
        self.pred = pred
        self.ret_type = ret_type
        self.expand_args = expand_args
        self.n_jobs = n_jobs
        self.verbose = verbose
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        # Delegate to dp.filter with the stored configuration.
        return filter(self.pred, data,
                      ret_type=self.ret_type,
                      expand_args=self.expand_args,
                      n_jobs=self.n_jobs,
                      verbose=self.verbose,
                      **self.kwargs)
class Splitter(BaseTransformer):
    def __init__(self, func, ret_type=None, return_labels=False, expand_args=True, n_jobs=1, verbose=0, **kwargs):
        '''
        dp.Splitter is the transformer counterpart of dp.split: it partitions
        the data items by a discriminator function on transform().

        Parameters
        -----------
        :param func: the discriminator function
        :param ret_type: if provided the used return type, otherwise ret_type(data)
        :param return_labels: true if the associated labels should be returned, false otherwise
        :param expand_args: true if args should be expanded, false otherwise
        :param n_jobs: amount of used threads/processes
        :param verbose: verbosity level for tqdm / joblib
        :param kwargs: additional arguments for joblib.Parallel, e.g. backend='loky'
        '''
        # Capture the configuration for later transform() calls.
        self.func = func
        self.ret_type = ret_type
        self.return_labels = return_labels
        self.expand_args = expand_args
        self.n_jobs = n_jobs
        self.verbose = verbose
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        # Delegate to dp.split with the stored configuration.
        return split(self.func, data,
                     ret_type=self.ret_type,
                     return_labels=self.return_labels,
                     expand_args=self.expand_args,
                     n_jobs=self.n_jobs,
                     verbose=self.verbose,
                     **self.kwargs)
class Expander(BaseTransformer):
    def __init__(self, func, ret_type=None, expand_args=True, n_jobs=1, verbose=0, **kwargs):
        '''
        dp.Expander is the respective transformer for dp.expand.

        Parameters
        -----------
        :param func: the expansion function
        :param ret_type: if provided the used return type, otherwise ret_type(data)
        :param expand_args: true if args should be expanded, false otherwise
        :param n_jobs: amount of used threads/processes
        :param verbose: verbosity level for tqdm / joblib
        :param kwargs: additional arguments for joblib.Parallel, e.g. backend='loky'
        '''
        self.func = func
        self.ret_type = ret_type
        self.expand_args = expand_args
        self.n_jobs = n_jobs
        self.verbose = verbose
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        # Bug fix: the keyword was misspelled `n_jons`, so the configured
        # parallelism was never forwarded to dp.expand.
        return expand(self.func, data, ret_type=self.ret_type, expand_args=self.expand_args, n_jobs=self.n_jobs,
                      verbose=self.verbose, **self.kwargs)
class Combiner(BaseTransformer):
    def __init__(self, func, expand_args=True, n_jobs=1, verbose=0, **kwargs):
        '''
        dp.Combiner is the transformer counterpart of dp.combine: it merges
        multiple collections item-wise with a combination function.

        Parameters
        -----------
        :param func: the combination function
        :param expand_args: true if args should be expanded, false otherwise
        :param n_jobs: amount of used threads/processes
        :param verbose: verbosity level for tqdm / joblib
        :param kwargs: additional arguments for joblib.Parallel, e.g. backend='loky'
        '''
        # Capture the configuration for later transform() calls.
        self.func = func
        self.expand_args = expand_args
        self.n_jobs = n_jobs
        self.verbose = verbose
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        # Note: *data unpacks the input collections for dp.combine.
        return combine(self.func, *data,
                       expand_args=self.expand_args,
                       n_jobs=self.n_jobs,
                       verbose=self.verbose,
                       **self.kwargs)
class Joiner(BaseTransformer):
    def __init__(self, func, expand_args=True, n_jobs=1, verbose=0, **kwargs):
        '''
        dp.Joiner is the transformer counterpart of dp.join: it joins
        multiple collections with a join predicate.

        Parameters
        -----------
        :param func: the join function
        :param expand_args: true if args should be expanded, false otherwise
        :param n_jobs: amount of used threads/processes
        :param verbose: verbosity level for tqdm / joblib
        :param kwargs: additional arguments for joblib.Parallel, e.g. backend='loky'
        '''
        # Capture the configuration for later transform() calls.
        self.func = func
        self.expand_args = expand_args
        self.n_jobs = n_jobs
        self.verbose = verbose
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        # Note: *data unpacks the input collections for dp.join.
        return join(self.func, *data,
                    expand_args=self.expand_args,
                    n_jobs=self.n_jobs,
                    verbose=self.verbose,
                    **self.kwargs)
class Manipulator(BaseTransformer):
    def __init__(self, func, void=False, *args, **kwargs):
        '''
        dp.Manipulator applies a function to the entire collection of data
        items at once (rather than item by item).

        Parameters
        -----------
        :param func: the manipulation function
        :param void: if true the result is not returned
        :param args: additional args for func
        :param kwargs: additional kwargs for func
        '''
        self.func = func
        self.void = void
        self.args = args
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        result = self.func(data, *self.args, **self.kwargs)
        if self.void is False:
            return result
        # In void mode the function is run for its side effects only.
        return data
class Window(BaseTransformer):
    """Transformer counterpart of dp.windowed.

    Parameters
    -----------
    :param size: the window size
    :param step: the window step
    :param ret_type: if provided the used return type, otherwise ret_type(data)
    """

    def __init__(self, size, step=1, ret_type=None):
        self.size = size
        self.step = step
        self.ret_type = ret_type

    def transform(self, data, *args, **kwargs):
        """Delegate to dp.windowed with the stored settings."""
        return windowed(data, self.size, step=self.step, ret_type=self.ret_type)
class Flat(BaseTransformer):
    """Transformer counterpart of dp.flatten.

    Parameters
    -----------
    :param ret_type: if provided the used return type, otherwise ret_type(data)
    """

    def __init__(self, ret_type=None):
        self.ret_type = ret_type

    def transform(self, data, *args, **kwargs):
        """Delegate to dp.flatten."""
        return flatten(data, ret_type=self.ret_type)
class Union(BaseTransformer):
    """Apply one transformer per sub-collection of a multi-collection.

    Parameters
    -----------
    :param transformers: the transformers for the respective collections of data items
    :param n_jobs: amount of used threads/processes
    :param verbose: verbosity level for tqdm / joblib
    :param kwargs: additional arguments for joblib.Parallel, e.g. backend='loky'
    """

    def __init__(self, *transformers, n_jobs=1, verbose=0, **kwargs):
        self.transformers = transformers
        self.n_jobs = n_jobs
        self.verbose = verbose
        self.kwargs = kwargs

    def transform(self, data, *args, **kwargs):
        """Run each transformer on its collection, serially or via joblib."""
        pairs = zip(self.transformers, data)
        if self.n_jobs == 1:
            return [t.transform(items, *args, **kwargs)
                    for t, items in tqdm(pairs, disable=self.verbose < 1)]
        jobs = (delayed(t.transform)(items, *args, **kwargs)
                for t, items in pairs)
        return Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                        **self.kwargs)(jobs)
class Pipeline(BaseTransformer):
    """Pipe a sequence of transformers, feeding each one the previous output.

    Parameters
    -----------
    :param transformers: the transformer sequence to apply
    :param verbose: verbosity level for tqdm
    """

    def __init__(self, *transformers, verbose=0):
        self.transformers = list(transformers)
        self.verbose = verbose

    def transform(self, data, *args, **kwargs):
        """Apply every transformer in order to data."""
        current = data
        for stage in tqdm(self.transformers, disable=self.verbose < 1):
            current = stage.transform(current, *args, **kwargs)
        return current
| 36.028269
| 127
| 0.640643
| 1,277
| 10,196
| 4.981989
| 0.097103
| 0.033008
| 0.02122
| 0.037724
| 0.682804
| 0.666143
| 0.6094
| 0.600283
| 0.587865
| 0.556429
| 0
| 0.002518
| 0.260004
| 10,196
| 282
| 128
| 36.156028
| 0.840689
| 0.382601
| 0
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.205357
| false
| 0
| 0.035714
| 0.071429
| 0.455357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
bcfb7330e40f9b79f2ab184f143d401951828548
| 2,513
|
py
|
Python
|
tacker/sol_refactored/common/vnf_instance_utils.py
|
h1r0mu/tacker
|
8c69dda51fcfe215c4878a86b82018d2b96e5561
|
[
"Apache-2.0"
] | 116
|
2015-10-18T02:57:08.000Z
|
2022-03-15T04:09:18.000Z
|
tacker/sol_refactored/common/vnf_instance_utils.py
|
h1r0mu/tacker
|
8c69dda51fcfe215c4878a86b82018d2b96e5561
|
[
"Apache-2.0"
] | 6
|
2016-11-07T22:15:54.000Z
|
2021-05-09T06:13:08.000Z
|
tacker/sol_refactored/common/vnf_instance_utils.py
|
h1r0mu/tacker
|
8c69dda51fcfe215c4878a86b82018d2b96e5561
|
[
"Apache-2.0"
] | 166
|
2015-10-20T15:31:52.000Z
|
2021-11-12T08:39:49.000Z
|
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored import objects
# Module-level logger; currently unused.
LOG = logging.getLogger(__name__)
def get_inst(context, inst_id):
    """Fetch a VnfInstanceV2 by id, raising VnfInstanceNotFound if absent."""
    instance = objects.VnfInstanceV2.get_by_id(context, inst_id)
    if instance is not None:
        return instance
    raise sol_ex.VnfInstanceNotFound(inst_id=inst_id)
def get_inst_all(context):
    """Return every VnfInstanceV2 record in the given context."""
    return objects.VnfInstanceV2.get_all(context)
def inst_href(inst_id, endpoint):
    """Build the v2 VNF-LCM resource URL for a VNF instance id."""
    return f"{endpoint}/v2/vnflcm/vnf_instances/{inst_id}"
def make_inst_links(inst, endpoint):
    """Build the _links structure for a VNF instance representation."""
    href = inst_href(inst.id, endpoint)
    links = objects.VnfInstanceV2_Links()
    links.self = objects.Link(href=href)
    if inst.instantiationState == 'NOT_INSTANTIATED':
        links.instantiate = objects.Link(href=href + "/instantiate")
    else:
        # instantiationState == 'INSTANTIATED'
        links.terminate = objects.Link(href=href + "/terminate")
        # TODO(oda-g): add further links (scale, etc.) when the
        # operations are supported.
    return links
# see IETF RFC 7396
def json_merge_patch(target, patch):
    """Apply an RFC 7396 JSON merge patch to target and return the result."""
    if not isinstance(patch, dict):
        # A non-object patch replaces the target entirely.
        return patch
    result = target if isinstance(target, dict) else {}
    for key, value in patch.items():
        if value is None:
            # null in the patch means "remove this member".
            result.pop(key, None)
        else:
            result[key] = json_merge_patch(result.get(key), value)
    return result
def select_vim_info(vim_connection_info):
    """Pick one vim_info entry out of vimConnectionInfo.

    NOTE: vimConnectionInfo is assumed to hold a single item at the
    moment; with multiple items the chosen one is unspecified.
    Returns None when the mapping is empty.
    """
    return next(iter(vim_connection_info.values()), None)
| 32.217949
| 78
| 0.68842
| 345
| 2,513
| 4.884058
| 0.446377
| 0.024926
| 0.035608
| 0.045104
| 0.080712
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.229606
| 2,513
| 77
| 79
| 32.636364
| 0.862087
| 0.374055
| 0
| 0.078947
| 0
| 0
| 0.043254
| 0.018722
| 0
| 0
| 0
| 0.012987
| 0
| 1
| 0.157895
| false
| 0
| 0.078947
| 0.052632
| 0.421053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4c0acd1ef9075a9d08118479182c8461e04d6e01
| 3,844
|
py
|
Python
|
texts.py
|
ProtKsen/pgame
|
c4455c6c07eaf275f9fcfa661cd6933ee7b1ff92
|
[
"MIT"
] | 2
|
2021-04-14T09:49:27.000Z
|
2022-03-08T17:26:49.000Z
|
texts.py
|
ProtKsen/pgame
|
c4455c6c07eaf275f9fcfa661cd6933ee7b1ff92
|
[
"MIT"
] | null | null | null |
texts.py
|
ProtKsen/pgame
|
c4455c6c07eaf275f9fcfa661cd6933ee7b1ff92
|
[
"MIT"
] | 2
|
2021-01-11T12:09:26.000Z
|
2021-04-14T09:49:45.000Z
|
"""Text parts."""
SEPARATOR = '----------------------------------'
CONT_GAME = 'enter для продолжения игры'
GREETING = 'Добро пожаловать в игру ''Сундук сокровищ''!\n' \
'Попробуй себя в роли капитана корабля, собери ' \
'команду и достань все сокровища!'
NAME_QUESTION = 'Как тебя зовут?'
CHOOSE_LEVEL = 'Выбери уровень сложности, он влияет на стоимость ' \
'сокровищ на островах. \n' \
'1 - легко \n' \
'2 - средне \n' \
'3 - тяжело'
INTRODUCTION = 'В наследство от дядюшки тебе достался корабль, \n' \
'несколько золотых монет и карта, на которой \n' \
'отмечены 10 островов. На каждом из островов \n' \
'зарыт клад. Но для того, чтобы достать его, \n' \
'необходимо обезвредить ловушку. Чем больше \n' \
'порядковый номер острова, тем ценнее хранящееся \n' \
'на нем сокровище и тем труднее его получить. \n\n' \
'Цель игры - добыть все сокровища и скопить как можно больше монет. \n\n' \
'Команда твоего корабля сможет обезвредить ловушку, \n' \
'только если будет иметь нужное количество очков \n' \
'логики, силы и ловкости. \n\n' \
'!!! Сумма всех требуемых очков равна номеру острова,\n' \
'но точная комбинация тебе неизвестна. !!!'
ORACLE_QUESTION = 'Здесь неподалеку живет известный оракул. За определенную\n' \
'плату он сможет предсказать с какой ловушкой\n' \
'ты столкнешься на острове. Пойдешь ли ты к нему?\n' \
'----------------------------------\n'\
'1 - да, пойду\n' \
'2 - нет, сам разберусь'
ORACLE_QUESTION_1 = 'Что ты хочешь узнать у оракула? \n' \
'----------------------------------\n'\
'1 - я передумал, буду сам себе оракул! \n'\
'2 - сколько очков логики должно быть у команды? (1 монета) \n'\
'3 - сколько очков силы должно быть у команды? (1 монета) \n'\
'4 - сколько очков ловкости должно быть у команды? (1 монета) \n'\
'5 - узнать все требуемые характеристики (3 монеты)'
ORACLE_QUESTION_2 = 'Что ты хочешь узнать у оракула? \n' \
'----------------------------------\n'\
'1 - я передумал, буду сам себе оракул! \n'\
'2 - сколько очков логики должно быть у команды? (1 монета) \n'\
'3 - сколько очков силы должно быть у команды? (1 монета) \n'\
'4 - сколько очков ловкости должно быть у команды? (1 монета)'
GO_TAVERN_TEXT = 'Отлично! Для похода на остров тебе понадобится \n' \
'команда, а нанять ее ты сможешь в таверне.'
EXIT_QUESTION = 'Продолжить игру?\n' \
'----------------------------------\n'\
'1 - да\n' \
'2 - нет'
SUCCESS_STEP = 'Поздравляю! Ты смог достать спрятанное сокровище! \n' \
'Самое время готовиться к следующему походу.'
FAILURE_STEP = 'К сожалению, ты не смог достать сокровище. \n' \
'Если у тебя еще остались монеты, то можешь \n' \
'попробовать организовать поход заново. Удачи!'
WINNING = 'Поздравляю! Ты собрал сокровища со всех окрестных \n' \
'островов, можешь выкинуть ненужную теперь карту) \n' \
'Конец игры.'
LOSING = 'Сожалею, ты потратил все деньги. Карьера пиратского \n' \
'капитана подошла к концу. А дядюшка в тебя верил! \n' \
'Конец игры.'
NAMES = ['Боб', 'Ричард', 'Алан', 'Степан', 'Грозный Глаз', 'Гарри',
'Моррис', 'Джек', 'Алекс', 'Сэм', 'Том', 'Янис', 'Геральт',
'Ринсвинд', 'Купер', 'Борис', 'Джон', 'Рон']
| 48.05
| 90
| 0.533299
| 434
| 3,844
| 4.693548
| 0.532258
| 0.006873
| 0.032401
| 0.053019
| 0.18704
| 0.18704
| 0.18704
| 0.186549
| 0.186549
| 0.186549
| 0
| 0.010349
| 0.32128
| 3,844
| 79
| 91
| 48.658228
| 0.77041
| 0.002862
| 0
| 0.190476
| 0
| 0
| 0.655344
| 0.046512
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4c1514125da0b6d26946b5990ca8e3d69b019fd3
| 1,369
|
py
|
Python
|
tests/core/feature_extraction/test_galaxyProcessor.py
|
EmilioCC/gti770-student-framework
|
3cd72da8fe78c7ecfc26c9e688cbe1b7deee353a
|
[
"MIT"
] | null | null | null |
tests/core/feature_extraction/test_galaxyProcessor.py
|
EmilioCC/gti770-student-framework
|
3cd72da8fe78c7ecfc26c9e688cbe1b7deee353a
|
[
"MIT"
] | null | null | null |
tests/core/feature_extraction/test_galaxyProcessor.py
|
EmilioCC/gti770-student-framework
|
3cd72da8fe78c7ecfc26c9e688cbe1b7deee353a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import numpy as np
from unittest import TestCase
from core.feature_extraction.galaxy.galaxy_processor import GalaxyProcessor
from commons.helpers.dataset.strategies.galaxy_dataset.label_strategy import GalaxyDataSetLabelStrategy
from commons.helpers.dataset.context import Context
class TestGalaxyProcessor(TestCase):
    """Tests for galaxy feature extraction via GalaxyProcessor."""

    def setUp(self):
        validation_fraction = 0.2
        env_root = os.environ["VIRTUAL_ENV"]
        # Ground-truth CSV file and image directory shipped with the
        # virtual environment.
        self.galaxy_csv_file = env_root + "/data/csv/galaxy/galaxy.csv"
        self.galaxy_images_path = env_root + "/data/images/"
        # Load the labelled data set through the strategy/context pair.
        strategy = GalaxyDataSetLabelStrategy()
        context = Context(strategy)
        context.set_strategy(strategy)
        self.label_dataset = context.load_dataset(
            csv_file=self.galaxy_csv_file,
            one_hot=False,
            validation_size=np.float32(validation_fraction),
        )

    def testGalaxyProcessor(self):
        # Build the processor; actual feature extraction is currently
        # commented out.
        galaxy_processor = GalaxyProcessor(self.galaxy_images_path)
        #features = galaxy_processor.process_galaxy(self.label_dataset)
| 42.78125
| 103
| 0.731921
| 166
| 1,369
| 5.807229
| 0.39759
| 0.036307
| 0.062241
| 0.074689
| 0.128631
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004513
| 0.19065
| 1,369
| 32
| 104
| 42.78125
| 0.865523
| 0.208181
| 0
| 0
| 0
| 0
| 0.057514
| 0.025046
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
4c155f18a1b1670b63f094d3de08857496d9f8be
| 1,630
|
py
|
Python
|
gmso/formats/formats_registry.py
|
chrisiacovella/gmso
|
c78e2425ccb98ea952f024a569346d36045f6918
|
[
"MIT"
] | 20
|
2020-02-28T21:47:54.000Z
|
2022-02-14T20:13:56.000Z
|
gmso/formats/formats_registry.py
|
chrisiacovella/gmso
|
c78e2425ccb98ea952f024a569346d36045f6918
|
[
"MIT"
] | 364
|
2020-03-02T16:11:57.000Z
|
2022-03-29T00:57:00.000Z
|
gmso/formats/formats_registry.py
|
chrisiacovella/gmso
|
c78e2425ccb98ea952f024a569346d36045f6918
|
[
"MIT"
] | 28
|
2020-02-28T21:12:30.000Z
|
2022-01-31T21:02:30.000Z
|
"""Registry utilities to handle formats for gmso Topology."""
class UnsupportedFileFormatError(Exception):
    """Raised whenever loading or saving a given file format is unsupported."""
class Registry:
    """Map file extensions to handler callables."""

    def __init__(self):
        # extension -> callable
        self.handlers = {}

    def _assert_can_process(self, extension):
        """Raise UnsupportedFileFormatError when extension has no handler."""
        if extension in self.handlers:
            return
        raise UnsupportedFileFormatError(
            f"Extension {extension} cannot be processed as no utility "
            f"is defined in the current API to handle {extension} files."
        )

    def get_callable(self, extension):
        """Return the handler registered for extension."""
        self._assert_can_process(extension)
        return self.handlers[extension]
# Module-level singletons populated by the saves_as / loads_as decorators.
SaversRegistry = Registry()
LoadersRegistry = Registry()
class saves_as:
    """Decorator registering a function as the saver for extensions."""

    def __init__(self, *extensions):
        self.extensions = set(extensions)

    def __call__(self, method):
        """Record method in SaversRegistry for every extension."""
        for extension in self.extensions:
            SaversRegistry.handlers[extension] = method
        return method
class loads_as:
    """Decorator registering a function as the loader for extensions."""

    def __init__(self, *extensions):
        self.extensions = set(extensions)

    def __call__(self, method):
        """Record method in LoadersRegistry for every extension."""
        for extension in self.extensions:
            LoadersRegistry.handlers[extension] = method
        return method
| 28.596491
| 86
| 0.655828
| 183
| 1,630
| 5.661202
| 0.355191
| 0.081081
| 0.088803
| 0.100386
| 0.333977
| 0.277992
| 0.277992
| 0.277992
| 0.208494
| 0.208494
| 0
| 0
| 0.257055
| 1,630
| 56
| 87
| 29.107143
| 0.855491
| 0.234356
| 0
| 0.387097
| 0
| 0
| 0.094449
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 1
| 0.225806
| false
| 0
| 0
| 0
| 0.451613
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4c1e5197c84f6ae0e879e45cf958dcfad6b26bdf
| 7,000
|
py
|
Python
|
console.py
|
aplneto/redes_projeto
|
450ef8ac61e46bc38ff34142d07eda3d726ce326
|
[
"MIT"
] | 1
|
2019-04-04T13:10:01.000Z
|
2019-04-04T13:10:01.000Z
|
console.py
|
aplneto/redes_projeto
|
450ef8ac61e46bc38ff34142d07eda3d726ce326
|
[
"MIT"
] | null | null | null |
console.py
|
aplneto/redes_projeto
|
450ef8ac61e46bc38ff34142d07eda3d726ce326
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Módulo de configuração dos consoles
"""
from Crypto.PublicKey import RSA
import socket
import os
import base64
class Console(object):
    """Base class for the client and host terminals.

    Owns the communication socket and the RSA key pair, and provides the
    encrypted send/receive primitives shared by both ends.

    Attributes:
        sock (socket.socket): communication socket.
        key_file (str): path of the private-key file ('' when not given).
        privatekey (_RSAobj): private RSA key (only set when key_file given).
        publickey (bytes): exported public key (only set when key_file given).
    """

    def __init__(self, **kwargs):
        """Build the console.

        Kwargs:
            sock (socket): communication socket; a fresh TCP socket is
                created when none is supplied.
            key_file (str): file used to initialise the RSA key pair.
        """
        # Fix: the original passed socket.socket(...) as the default of
        # kwargs.get(), which created (and leaked) a socket even when the
        # caller supplied one.  Create the default lazily instead.
        sock = kwargs.get('sock')
        if sock is None:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock = sock
        # Stored so __repr__ can report it (the original never kept it).
        self.key_file = kwargs.get('key_file', '')
        if self.key_file:
            self.privatekey, self.publickey = Console.start_key(self.key_file)

    def run(self):
        """Drive the console; behaviour differs between Host and Client.

        Subclasses must override this with the full behaviour of an
        individual console.
        """
        # Fix: the original raised the NotImplemented singleton, which is
        # not an exception and itself raises TypeError at runtime.
        raise NotImplementedError

    @staticmethod
    def start_key(key_file):
        """Initialise the private key and the exportable public key.

        Args:
            key_file (str): path of the private-key file.

        Returns:
            (tuple) an _RSAobj private key and the bytes of the exported
            public key.
        """
        try:
            keyfile = open(key_file, 'rb')
        except FileNotFoundError:
            # No key on disk yet: generate a fresh 1024-bit pair.
            private_key = RSA.generate(1024)
        else:
            private_key = RSA.importKey(keyfile.read())
            keyfile.close()
        finally:
            public_key = private_key.publickey().exportKey()
        return private_key, public_key

    def receive_key(self):
        """Receive the peer's public key at the start of the connection.

        Returns:
            (_RSAobj) public key used to encrypt outgoing messages.
        """
        k = self.sock.recv(1024)
        key = RSA.importKey(k)
        return key

    def send(self, msg):
        """Send a simple message through the socket.

        The message is RSA-encrypted and Ascii85-encoded before sending.

        Args:
            msg (str or bytes): message to send.
        """
        msg = self.encrypt(msg)
        self.sock.send(msg)

    def receive(self, b=160):
        """Receive a simple message through the socket.

        Incoming data arrives encrypted; decryption happens here.

        Args:
            b (int): number of bytes to receive.

        Returns:
            (str) decrypted message.
        """
        msg = self.decrypt(self.sock.recv(b))
        return msg.decode('utf-8')

    def encrypt(self, msg):
        """Encrypt a string or byte segment.

        Args:
            msg (str or bytes): data to encrypt.

        Returns:
            (bytes) encrypted, Ascii85-encoded payload.
        """
        if isinstance(msg, str):
            msg = msg.encode('utf-8')
        # NOTE(review): PyCrypto's raw RSA encrypt ignores its second
        # argument (K); the constant is kept only for API compatibility.
        msg = self.publickey.encrypt(msg, 3.14159265359)
        msg = base64.a85encode(msg[0])
        return msg

    def decrypt(self, msg):
        """Decrypt an encrypted segment.

        Args:
            msg (bytes): message chunk to decipher.

        Returns:
            (bytes) deciphered bytes.
        """
        msg = base64.a85decode(msg)
        msg = self.privatekey.decrypt(msg)
        return msg

    def send_file(self, filename):
        """Generator routine that sends a file through the socket.

        Sends sequential 1024-byte segments, yielding after each one the
        total number of bytes sent so far.  Use as a generator::

            for b in self.send_file('alice.txt'):
                print(str(b) + " of " + str(file_size) + " bytes sent")

        Args:
            filename (str): path of the file.

        Yields:
            (int) number of bytes sent so far.
        """
        size = os.path.getsize(filename)
        self.send(str(size))
        sent = 0
        file = open(filename, 'rb')
        while sent < size:
            # Wait for the peer's ack before pushing the next chunk.
            self.receive()
            nxt = file.read(1024)
            self.sock.send(nxt)
            sent += len(nxt)
            yield sent
        file.close()

    def receive_file(self, filename):
        """Generator routine that receives a file through the socket.

        Yields the number of bytes received after each segment.  Use as a
        generator::

            for b in self.receive_file(filename):
                print(str(b) + " of " + str(filesize) + " bytes received")

        Args:
            filename (str): name of the file to write.

        Yields:
            (int) number of bytes received so far.
        """
        size = int(self.receive())
        file = open(filename, 'wb')
        rcvd = 0
        while rcvd < size:
            # Acknowledge before every chunk, mirroring send_file().
            self.send('ack')
            nxt = self.sock.recv(1024)
            rcvd += len(nxt)
            file.write(nxt)
            yield rcvd
        file.close()

    def __repr__(self):
        # Fix: the original referenced self.client and self.key_file,
        # neither of which was ever stored, so __repr__ always raised
        # AttributeError.
        return "{0}(sock={1!r}, key_file={2!r})".format(
            self.__class__.__name__, self.sock, self.key_file)
| 32.110092
| 80
| 0.532143
| 770
| 7,000
| 4.771429
| 0.327273
| 0.019053
| 0.023136
| 0.018508
| 0.10724
| 0.0773
| 0.043549
| 0.024496
| 0.024496
| 0
| 0
| 0.013242
| 0.395857
| 7,000
| 218
| 81
| 32.110092
| 0.855521
| 0.460857
| 0
| 0.054795
| 0
| 0
| 0.02305
| 0
| 0
| 0
| 0
| 0.087156
| 0
| 1
| 0.150685
| false
| 0
| 0.082192
| 0.013699
| 0.328767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4c1e5c9719ab7645023165c5beb655aadf6e00c7
| 4,988
|
py
|
Python
|
sandbox/settings.py
|
OmenApps/marion
|
f501674cafbd91f0bbad7454e4dcf3527cf4445e
|
[
"MIT"
] | null | null | null |
sandbox/settings.py
|
OmenApps/marion
|
f501674cafbd91f0bbad7454e4dcf3527cf4445e
|
[
"MIT"
] | null | null | null |
sandbox/settings.py
|
OmenApps/marion
|
f501674cafbd91f0bbad7454e4dcf3527cf4445e
|
[
"MIT"
] | null | null | null |
"""
Django settings for marion project.
"""
from pathlib import Path
from tempfile import mkdtemp
from configurations import Configuration, values
BASE_DIR = Path(__file__).parent.resolve()
DATA_DIR = Path("/data")
# pylint: disable=no-init
class Base(Configuration):
    """
    This is the base configuration every configuration (aka environment)
    should inherit from. It is recommended to configure third-party
    applications by creating a configuration mixins in ./configurations and
    compose the Base configuration with those mixins.

    It depends on an environment variable that SHOULD be defined:

    * DJANGO_SECRET_KEY

    You may also want to override default configuration by setting the
    following environment variables:

    * DB_NAME
    * DB_HOST
    * DB_PASSWORD
    * DB_USER
    """

    DEBUG = False

    # Security
    ALLOWED_HOSTS = []
    # No default: the secret key MUST come from DJANGO_SECRET_KEY.
    SECRET_KEY = values.Value(None)

    # SECURE_PROXY_SSL_HEADER allows to fix the scheme in Django's HttpRequest
    # object when your application is behind a reverse proxy.
    #
    # Keep this SECURE_PROXY_SSL_HEADER configuration only if :
    # - your Django app is behind a proxy.
    # - your proxy strips the X-Forwarded-Proto header from all incoming requests
    # - Your proxy sets the X-Forwarded-Proto header and sends it to Django
    #
    # In other cases, you should comment the following line to avoid security issues.
    SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")

    # Standard Django password strength validators.
    AUTH_PASSWORD_VALIDATORS = [
        {
            "NAME": (
                "django.contrib.auth.password_validation."
                "UserAttributeSimilarityValidator"
            ),
        },
        {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
        {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
        {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
    ]

    # Application
    ROOT_URLCONF = "urls"
    WSGI_APPLICATION = "wsgi.application"

    # Database
    # Each setting can be overridden through the matching DB_* environment
    # variable (environ_prefix=None drops the DJANGO_ prefix).
    DATABASES = {
        "default": {
            "ENGINE": values.Value(
                "django.db.backends.postgresql_psycopg2",
                environ_name="DB_ENGINE",
                environ_prefix=None,
            ),
            "NAME": values.Value("marion", environ_name="DB_NAME", environ_prefix=None),
            "USER": values.Value("fun", environ_name="DB_USER", environ_prefix=None),
            "PASSWORD": values.Value(
                "pass", environ_name="DB_PASSWORD", environ_prefix=None
            ),
            "HOST": values.Value(
                "localhost", environ_name="DB_HOST", environ_prefix=None
            ),
            "PORT": values.Value(5432, environ_name="DB_PORT", environ_prefix=None),
        }
    }

    # Static files (CSS, JavaScript, Images)
    STATIC_URL = "/static/"
    STATIC_ROOT = DATA_DIR.joinpath("static")
    MEDIA_URL = "/media/"
    MEDIA_ROOT = DATA_DIR.joinpath("media")

    # Internationalization
    LANGUAGE_CODE = "en-us"
    TIME_ZONE = "UTC"
    USE_I18N = True
    USE_L10N = True
    USE_TZ = True

    # Application definition
    INSTALLED_APPS = [
        "django.contrib.admin",
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sessions",
        "django.contrib.messages",
        "django.contrib.staticfiles",
        "rest_framework",
        "marion",
    ]

    MIDDLEWARE = [
        "django.middleware.security.SecurityMiddleware",
        "django.contrib.sessions.middleware.SessionMiddleware",
        "django.middleware.common.CommonMiddleware",
        "django.middleware.csrf.CsrfViewMiddleware",
        "django.contrib.auth.middleware.AuthenticationMiddleware",
        "django.contrib.messages.middleware.MessageMiddleware",
        "django.middleware.clickjacking.XFrameOptionsMiddleware",
    ]

    TEMPLATES = [
        {
            "BACKEND": "django.template.backends.django.DjangoTemplates",
            "DIRS": [],
            "APP_DIRS": True,
            "OPTIONS": {
                "context_processors": [
                    "django.template.context_processors.debug",
                    "django.template.context_processors.request",
                    "django.contrib.auth.context_processors.auth",
                    "django.contrib.messages.context_processors.messages",
                ],
            },
        },
    ]
class Development(Base):
    """
    Development environment settings

    We set DEBUG to True and configure the server to respond from all hosts.
    """

    DEBUG = True
    ALLOWED_HOSTS = ["*"]
    ROOT_URLCONF = "urls.debug"

    # Application definition
    INSTALLED_APPS = Base.INSTALLED_APPS + [
        "howard",
    ]

    MARION_DOCUMENT_ISSUER_CHOICES_CLASS = "howard.defaults.DocumentIssuerChoices"
class Test(Base):
    """Test environment settings"""

    # Write uploaded media to a throw-away temporary directory.
    MEDIA_ROOT = Path(mkdtemp())
    ROOT_URLCONF = "urls.debug"
| 30.230303
| 88
| 0.635525
| 503
| 4,988
| 6.139165
| 0.405567
| 0.063148
| 0.038536
| 0.027202
| 0.066062
| 0.050518
| 0
| 0
| 0
| 0
| 0
| 0.002456
| 0.265237
| 4,988
| 164
| 89
| 30.414634
| 0.840109
| 0.255413
| 0
| 0.063158
| 0
| 0
| 0.370668
| 0.282783
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.073684
| 0.031579
| 0
| 0.347368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.