Dataset preview: one row per source file, with repository metadata, the file `content`, and code-quality signals. The extraction had mis-aligned the column name/type pairs; the reconstructed schema (113 columns) is:

| column | dtype | column | dtype | column | dtype |
|---|---|---|---|---|---|
| hexsha | string | size | int64 | ext | string |
| lang | string | max_stars_repo_path | string | max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string | max_stars_repo_licenses | list | max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string | max_stars_repo_stars_event_max_datetime | string | max_issues_repo_path | string |
| max_issues_repo_name | string | max_issues_repo_head_hexsha | string | max_issues_repo_licenses | list |
| max_issues_count | int64 | max_issues_repo_issues_event_min_datetime | string | max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string | max_forks_repo_name | string | max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list | max_forks_count | int64 | max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string | content | string | avg_line_length | float64 |
| max_line_length | int64 | alphanum_fraction | float64 | qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 | qsc_code_mean_word_length_quality_signal | float64 | qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 | qsc_code_frac_chars_top_3grams_quality_signal | float64 | qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 | qsc_code_frac_chars_dupe_6grams_quality_signal | float64 | qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 | qsc_code_frac_chars_dupe_9grams_quality_signal | float64 | qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 | qsc_code_frac_chars_digital_quality_signal | float64 | qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 | qsc_code_num_lines_quality_signal | float64 | qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 | qsc_code_frac_chars_alphabet_quality_signal | float64 | qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 | qsc_code_frac_lines_dupe_lines_quality_signal | float64 | qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 | qsc_code_frac_chars_string_length_quality_signal | float64 | qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 | qsc_code_cate_encoded_data_quality_signal | float64 | qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 | qsc_code_frac_lines_assert_quality_signal | float64 | qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 | qsc_codepython_cate_var_zero_quality_signal | bool | qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 | qsc_codepython_frac_lines_simplefunc_quality_signal | float64 | qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 | qsc_code_num_words | int64 | qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 | qsc_code_frac_words_unique | null | qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 | qsc_code_frac_chars_top_4grams | int64 | qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 | qsc_code_frac_chars_dupe_7grams | int64 | qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 | qsc_code_frac_chars_dupe_10grams | int64 | qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 | qsc_code_frac_chars_whitespace | int64 | qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 | qsc_code_num_chars_line_max | int64 | qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 | qsc_code_frac_chars_comments | int64 | qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 | qsc_code_cate_autogen | int64 | qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 | qsc_code_frac_chars_long_word_length | int64 | qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 | qsc_code_frac_chars_hex_words | int64 | qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 | qsc_codepython_cate_ast | int64 | qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 | qsc_codepython_frac_lines_pass | int64 | qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 | qsc_codepython_score_lines_no_logic | int64 | qsc_codepython_frac_lines_print | int64 |
| effective | string | hits | int64 | | |

The `*_quality_signal` columns hold the measured float values; the same names without the suffix are int64 threshold flags (1 when that filter fired), and `hits` counts how many flags fired for the row.
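The preview does not document how the `qsc_code_*` signals are computed, but the names suggest straightforward text statistics. A rough, non-authoritative sketch of how a few of them could be approximated for one file (the exact tokenizer and n-gram accounting used by the dataset are assumptions here):

```python
import re


def quality_signals(content: str) -> dict:
    """Approximate a handful of the qsc_code_* columns for one file.

    Illustrative only: the dataset's real tokenizer and n-gram
    accounting are not documented in this preview.
    """
    words = re.findall(r"\w+", content)
    lines = content.splitlines()
    total_word_chars = sum(len(w) for w in words) or 1

    def frac_chars_dupe_ngrams(n: int) -> float:
        # Fraction of word characters covered by repeated word n-grams.
        grams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
        seen, dupes = set(), set()
        for g in grams:
            (dupes if g in seen else seen).add(g)
        dup_chars = sum(sum(map(len, g)) for g in grams if g in dupes)
        return min(dup_chars / total_word_chars, 1.0)

    return {
        "qsc_code_num_words": len(words),
        "qsc_code_num_chars": len(content),
        "qsc_code_mean_word_length": total_word_chars / max(len(words), 1),
        "qsc_code_frac_words_unique": len(set(words)) / max(len(words), 1),
        "qsc_code_num_lines": len(lines),
        "qsc_code_num_chars_line_max": max(map(len, lines), default=0),
        "qsc_code_frac_chars_whitespace": sum(c.isspace() for c in content) / max(len(content), 1),
        "qsc_code_frac_chars_dupe_5grams": frac_chars_dupe_ngrams(5),
    }
```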
Row 1. The `max_stars_repo_*`, `max_issues_repo_*`, and `max_forks_repo_*` path/name/head/license columns are identical for this row, so each is shown once; the parenthesized ranges are the corresponding event min/max datetimes.

| field | value |
|---|---|
| hexsha | 07dee507ce31e115b2b94a29d53cdc5c3d4bd0df |
| size | 2,316 |
| ext / lang | py / Python |
| repo_path | scripts/examples/OpenMV/16-Codes/find_barcodes.py |
| repo_name | jiskra/openmv |
| repo_head_hexsha | a0f321836f77f94d8118910598dcdb79eb784d58 |
| repo_licenses | ["MIT"] |
| max_stars_count | 1,761 (2015-07-10T23:14:17.000Z → 2022-03-30T07:49:49.000Z) |
| max_issues_count | 487 (2015-07-07T23:21:20.000Z → 2022-03-30T17:13:22.000Z) |
| max_forks_count | 882 (2015-08-01T08:34:19.000Z → 2022-03-30T07:36:23.000Z) |
`content`:

```python
# Barcode Example
#
# This example shows off how easy it is to detect bar codes using the
# OpenMV Cam M7. Barcode detection does not work on the M4 Camera.

import sensor, image, time, math

sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.VGA) # High Res!
sensor.set_windowing((640, 80)) # V Res of 80 == less work (40 for 2X the speed).
sensor.skip_frames(time = 2000)
sensor.set_auto_gain(False)  # must turn this off to prevent image washout...
sensor.set_auto_whitebal(False)  # must turn this off to prevent image washout...
clock = time.clock()

# Barcode detection can run at the full 640x480 resolution of your OpenMV Cam's
# OV7725 camera module. Barcode detection will also work in RGB565 mode but at
# a lower resolution. That said, barcode detection requires a higher resolution
# to work well so it should always be run at 640x480 in grayscale...

def barcode_name(code):
    if(code.type() == image.EAN2):
        return "EAN2"
    if(code.type() == image.EAN5):
        return "EAN5"
    if(code.type() == image.EAN8):
        return "EAN8"
    if(code.type() == image.UPCE):
        return "UPCE"
    if(code.type() == image.ISBN10):
        return "ISBN10"
    if(code.type() == image.UPCA):
        return "UPCA"
    if(code.type() == image.EAN13):
        return "EAN13"
    if(code.type() == image.ISBN13):
        return "ISBN13"
    if(code.type() == image.I25):
        return "I25"
    if(code.type() == image.DATABAR):
        return "DATABAR"
    if(code.type() == image.DATABAR_EXP):
        return "DATABAR_EXP"
    if(code.type() == image.CODABAR):
        return "CODABAR"
    if(code.type() == image.CODE39):
        return "CODE39"
    if(code.type() == image.PDF417):
        return "PDF417"
    if(code.type() == image.CODE93):
        return "CODE93"
    if(code.type() == image.CODE128):
        return "CODE128"

while(True):
    clock.tick()
    img = sensor.snapshot()
    codes = img.find_barcodes()
    for code in codes:
        img.draw_rectangle(code.rect())
        print_args = (barcode_name(code), code.payload(), (180 * code.rotation()) / math.pi, code.quality(), clock.fps())
        print("Barcode %s, Payload \"%s\", rotation %f (degrees), quality %d, FPS %f" % print_args)
    if not codes:
        print("FPS %f" % clock.fps())
```
Quality signals for this row. Names are shortened by dropping the `qsc_` prefix and `_quality_signal` suffix; any signal not listed is 0, and `codepython_cate_var_zero` is false.

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 35.090909 | max_line_length | 121 | alphanum_fraction | 0.638169 |
| code_num_words | 324 | code_num_chars | 2,316 | code_mean_word_length | 4.512346 |
| code_frac_words_unique | 0.401235 | code_frac_chars_top_2grams | 0.065663 | code_frac_chars_top_3grams | 0.109439 |
| code_frac_chars_top_4grams | 0.164159 | code_frac_chars_dupe_5grams | 0.086183 | code_frac_chars_dupe_6grams | 0.056088 |
| code_frac_chars_dupe_7grams | 0.056088 | code_frac_chars_dupe_8grams | 0.056088 | code_frac_chars_dupe_9grams | 0.056088 |
| code_frac_chars_digital | 0.04442 | code_frac_chars_whitespace | 0.222366 | code_size_file_byte | 2,316 |
| code_num_lines | 65 | code_num_chars_line_max | 122 | code_num_chars_line_mean | 35.630769 |
| code_frac_chars_alphabet | 0.767351 | code_frac_chars_comments | 0.259067 | code_frac_chars_string_length | 0.093952 |
| codepython_cate_ast | 1 | codepython_frac_lines_func_ratio | 0.019231 | codepython_frac_lines_import | 0.019231 |
| codepython_score_lines_no_logic | 0.346154 | codepython_frac_lines_print | 0.057692 | | |

Among the int64 filter flags, only `qsc_code_frac_chars_top_4grams` is 1; the two null-typed columns (`qsc_code_frac_words_unique`, `qsc_code_frac_lines_string_concat`) are null; `effective` = 0; `hits` = 1.
Row 2 (all star, issue, and fork counts and event datetimes are null for this row):

| field | value |
|---|---|
| hexsha | 07e0af37e19269400d7c0cf5ac0dc3b1672e18e4 |
| size | 10,752 |
| ext / lang | py / Python |
| repo_path | tests/test_packed_to_padded.py |
| repo_name | theycallmepeter/pytorch3d_PBR |
| repo_head_hexsha | bc83c23969ff7843fc05d2da001952b368926174 |
| repo_licenses | ["BSD-3-Clause"] |
`content` (the extraction had flattened indentation and dropped the `- s` offsets in the two reference loops, which would make the padded/packed copies crash for middle meshes; both are restored below):

```python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import unittest

import torch
from common_testing import TestCaseMixin, get_random_cuda_device
from pytorch3d.ops import packed_to_padded, padded_to_packed
from pytorch3d.structures.meshes import Meshes


class TestPackedToPadded(TestCaseMixin, unittest.TestCase):
    def setUp(self) -> None:
        super().setUp()
        torch.manual_seed(1)

    @staticmethod
    def init_meshes(
        num_meshes: int = 10,
        num_verts: int = 1000,
        num_faces: int = 3000,
        device: str = "cpu",
    ):
        device = torch.device(device)
        verts_list = []
        faces_list = []
        for _ in range(num_meshes):
            verts = torch.rand((num_verts, 3), dtype=torch.float32, device=device)
            faces = torch.randint(
                num_verts, size=(num_faces, 3), dtype=torch.int64, device=device
            )
            verts_list.append(verts)
            faces_list.append(faces)
        meshes = Meshes(verts_list, faces_list)
        return meshes

    @staticmethod
    def packed_to_padded_python(inputs, first_idxs, max_size, device):
        """
        PyTorch implementation of packed_to_padded function.
        """
        num_meshes = first_idxs.size(0)
        D = inputs.shape[1] if inputs.dim() == 2 else 0
        if D == 0:
            inputs_padded = torch.zeros((num_meshes, max_size), device=device)
        else:
            inputs_padded = torch.zeros((num_meshes, max_size, D), device=device)
        for m in range(num_meshes):
            s = first_idxs[m]
            if m == num_meshes - 1:
                f = inputs.shape[0]
            else:
                f = first_idxs[m + 1]
            inputs_padded[m, : f - s] = inputs[s:f]
        return inputs_padded

    @staticmethod
    def padded_to_packed_python(inputs, first_idxs, num_inputs, device):
        """
        PyTorch implementation of padded_to_packed function.
        """
        num_meshes = inputs.size(0)
        D = inputs.shape[2] if inputs.dim() == 3 else 0
        if D == 0:
            inputs_packed = torch.zeros((num_inputs,), device=device)
        else:
            inputs_packed = torch.zeros((num_inputs, D), device=device)
        for m in range(num_meshes):
            s = first_idxs[m]
            if m == num_meshes - 1:
                f = num_inputs
            else:
                f = first_idxs[m + 1]
            inputs_packed[s:f] = inputs[m, : f - s]
        return inputs_packed

    def _test_packed_to_padded_helper(self, D, device):
        """
        Check the results from packed_to_padded and PyTorch implementations
        are the same.
        """
        meshes = self.init_meshes(16, 100, 300, device=device)
        faces = meshes.faces_packed()
        mesh_to_faces_packed_first_idx = meshes.mesh_to_faces_packed_first_idx()
        max_faces = meshes.num_faces_per_mesh().max().item()

        if D == 0:
            values = torch.rand((faces.shape[0],), device=device, requires_grad=True)
        else:
            values = torch.rand((faces.shape[0], D), device=device, requires_grad=True)
        values_torch = values.detach().clone()
        values_torch.requires_grad = True
        values_padded = packed_to_padded(
            values, mesh_to_faces_packed_first_idx, max_faces
        )
        values_padded_torch = TestPackedToPadded.packed_to_padded_python(
            values_torch, mesh_to_faces_packed_first_idx, max_faces, device
        )
        # check forward
        self.assertClose(values_padded, values_padded_torch)

        # check backward
        if D == 0:
            grad_inputs = torch.rand((len(meshes), max_faces), device=device)
        else:
            grad_inputs = torch.rand((len(meshes), max_faces, D), device=device)
        values_padded.backward(grad_inputs)
        grad_outputs = values.grad
        values_padded_torch.backward(grad_inputs)
        grad_outputs_torch1 = values_torch.grad
        grad_outputs_torch2 = TestPackedToPadded.padded_to_packed_python(
            grad_inputs, mesh_to_faces_packed_first_idx, values.size(0), device=device
        )
        self.assertClose(grad_outputs, grad_outputs_torch1)
        self.assertClose(grad_outputs, grad_outputs_torch2)

    def test_packed_to_padded_flat_cpu(self):
        self._test_packed_to_padded_helper(0, "cpu")

    def test_packed_to_padded_D1_cpu(self):
        self._test_packed_to_padded_helper(1, "cpu")

    def test_packed_to_padded_D16_cpu(self):
        self._test_packed_to_padded_helper(16, "cpu")

    def test_packed_to_padded_flat_cuda(self):
        device = get_random_cuda_device()
        self._test_packed_to_padded_helper(0, device)

    def test_packed_to_padded_D1_cuda(self):
        device = get_random_cuda_device()
        self._test_packed_to_padded_helper(1, device)

    def test_packed_to_padded_D16_cuda(self):
        device = get_random_cuda_device()
        self._test_packed_to_padded_helper(16, device)

    def _test_padded_to_packed_helper(self, D, device):
        """
        Check the results from packed_to_padded and PyTorch implementations
        are the same.
        """
        meshes = self.init_meshes(16, 100, 300, device=device)
        mesh_to_faces_packed_first_idx = meshes.mesh_to_faces_packed_first_idx()
        num_faces_per_mesh = meshes.num_faces_per_mesh()
        max_faces = num_faces_per_mesh.max().item()
        if D == 0:
            values = torch.rand((len(meshes), max_faces), device=device)
        else:
            values = torch.rand((len(meshes), max_faces, D), device=device)
        for i, num in enumerate(num_faces_per_mesh):
            values[i, num:] = 0
        values.requires_grad = True
        values_torch = values.detach().clone()
        values_torch.requires_grad = True
        values_packed = padded_to_packed(
            values, mesh_to_faces_packed_first_idx, num_faces_per_mesh.sum().item()
        )
        values_packed_torch = TestPackedToPadded.padded_to_packed_python(
            values_torch,
            mesh_to_faces_packed_first_idx,
            num_faces_per_mesh.sum().item(),
            device,
        )
        # check forward
        self.assertClose(values_packed, values_packed_torch)

        # check backward
        if D == 0:
            grad_inputs = torch.rand((num_faces_per_mesh.sum().item()), device=device)
        else:
            grad_inputs = torch.rand(
                (num_faces_per_mesh.sum().item(), D), device=device
            )
        values_packed.backward(grad_inputs)
        grad_outputs = values.grad
        values_packed_torch.backward(grad_inputs)
        grad_outputs_torch1 = values_torch.grad
        grad_outputs_torch2 = TestPackedToPadded.packed_to_padded_python(
            grad_inputs, mesh_to_faces_packed_first_idx, values.size(1), device=device
        )
        self.assertClose(grad_outputs, grad_outputs_torch1)
        self.assertClose(grad_outputs, grad_outputs_torch2)

    def test_padded_to_packed_flat_cpu(self):
        self._test_padded_to_packed_helper(0, "cpu")

    def test_padded_to_packed_D1_cpu(self):
        self._test_padded_to_packed_helper(1, "cpu")

    def test_padded_to_packed_D16_cpu(self):
        self._test_padded_to_packed_helper(16, "cpu")

    def test_padded_to_packed_flat_cuda(self):
        device = get_random_cuda_device()
        self._test_padded_to_packed_helper(0, device)

    def test_padded_to_packed_D1_cuda(self):
        device = get_random_cuda_device()
        self._test_padded_to_packed_helper(1, device)

    def test_padded_to_packed_D16_cuda(self):
        device = get_random_cuda_device()
        self._test_padded_to_packed_helper(16, device)

    def test_invalid_inputs_shapes(self, device="cuda:0"):
        with self.assertRaisesRegex(ValueError, "input can only be 2-dimensional."):
            values = torch.rand((100, 50, 2), device=device)
            first_idxs = torch.tensor([0, 80], dtype=torch.int64, device=device)
            packed_to_padded(values, first_idxs, 100)
        with self.assertRaisesRegex(ValueError, "input can only be 3-dimensional."):
            values = torch.rand((100,), device=device)
            first_idxs = torch.tensor([0, 80], dtype=torch.int64, device=device)
            padded_to_packed(values, first_idxs, 20)
        with self.assertRaisesRegex(ValueError, "input can only be 3-dimensional."):
            values = torch.rand((100, 50, 2, 2), device=device)
            first_idxs = torch.tensor([0, 80], dtype=torch.int64, device=device)
            padded_to_packed(values, first_idxs, 20)

    @staticmethod
    def packed_to_padded_with_init(
        num_meshes: int, num_verts: int, num_faces: int, num_d: int, device: str = "cpu"
    ):
        meshes = TestPackedToPadded.init_meshes(
            num_meshes, num_verts, num_faces, device
        )
        faces = meshes.faces_packed()
        mesh_to_faces_packed_first_idx = meshes.mesh_to_faces_packed_first_idx()
        max_faces = meshes.num_faces_per_mesh().max().item()
        if num_d == 0:
            values = torch.rand((faces.shape[0],), device=meshes.device)
        else:
            values = torch.rand((faces.shape[0], num_d), device=meshes.device)
        torch.cuda.synchronize()

        def out():
            packed_to_padded(values, mesh_to_faces_packed_first_idx, max_faces)
            torch.cuda.synchronize()

        return out

    @staticmethod
    def packed_to_padded_with_init_torch(
        num_meshes: int, num_verts: int, num_faces: int, num_d: int, device: str = "cpu"
    ):
        meshes = TestPackedToPadded.init_meshes(
            num_meshes, num_verts, num_faces, device
        )
        faces = meshes.faces_packed()
        mesh_to_faces_packed_first_idx = meshes.mesh_to_faces_packed_first_idx()
        max_faces = meshes.num_faces_per_mesh().max().item()
        if num_d == 0:
            values = torch.rand((faces.shape[0],), device=meshes.device)
        else:
            values = torch.rand((faces.shape[0], num_d), device=meshes.device)
        torch.cuda.synchronize()

        def out():
            TestPackedToPadded.packed_to_padded_python(
                values, mesh_to_faces_packed_first_idx, max_faces, device
            )
            torch.cuda.synchronize()

        return out
```
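The two reference implementations convert between a packed layout, where the per-mesh tensors are concatenated along dim 0, and a padded layout with one fixed-size block per mesh. A tiny worked illustration of the layouts (values are made up):

```python
import torch

# Packed layout: two meshes with 2 and 3 faces of D = 1 features each;
# first_idxs marks where each mesh's rows begin.
inputs = torch.tensor([[1.0], [2.0], [3.0], [4.0], [5.0]])  # shape (5, 1)
first_idxs = torch.tensor([0, 2])  # mesh 0 -> rows 0..1, mesh 1 -> rows 2..4

# Padded layout with max_size = 3: one block per mesh, zero-filled past
# each mesh's own face count. packed_to_padded(inputs, first_idxs, 3)
# (or the pure-PyTorch reference above) returns:
#
#   tensor([[[1.], [2.], [0.]],
#           [[3.], [4.], [5.]]])
#
# padded_to_packed inverts this back to the (5, 1) packed tensor, which
# is why the tests can use each direction to check the other's gradient.
```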
Quality signals (same conventions as row 1; unlisted signals are 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 39.384615 | max_line_length | 89 | alphanum_fraction | 0.628255 |
| code_num_words | 1,355 | code_num_chars | 10,752 | code_mean_word_length | 4.635424 |
| code_frac_words_unique | 0.108487 | code_frac_chars_top_2grams | 0.033116 | code_frac_chars_top_3grams | 0.057953 |
| code_frac_chars_top_4grams | 0.043305 | code_frac_chars_dupe_5grams | 0.780608 | code_frac_chars_dupe_6grams | 0.736666 |
| code_frac_chars_dupe_7grams | 0.646712 | code_frac_chars_dupe_8grams | 0.609776 | code_frac_chars_dupe_9grams | 0.540519 |
| code_frac_chars_dupe_10grams | 0.496099 | code_frac_chars_digital | 0.018483 | code_frac_chars_whitespace | 0.280413 |
| code_size_file_byte | 10,752 | code_num_lines | 272 | code_num_chars_line_max | 90 |
| code_num_chars_line_mean | 39.529412 | code_frac_chars_alphabet | 0.793331 | code_frac_chars_comments | 0.048735 |
| code_frac_lines_dupe_lines | 0.41784 | code_frac_chars_string_length | 0.013111 | code_frac_lines_assert | 0.042254 |
| codepython_cate_ast | 1 | codepython_frac_lines_func_ratio | 0.107981 | codepython_frac_lines_import | 0.023474 |
| codepython_score_lines_no_logic | 0.159624 | | | | |

Filter flags: only `qsc_code_frac_chars_dupe_6grams` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 3:

| field | value |
|---|---|
| hexsha | 07e2537b3e43653ce0616ed6421ef634050042c8 |
| size | 3,085 |
| ext / lang | py / Python |
| repo_path | pysrc/classifier.py |
| repo_name | CrackerCat/xed |
| repo_head_hexsha | 428712c28e831573579b7f749db63d3a58dcdbd9 |
| repo_licenses | ["Apache-2.0"] |
| max_stars_count | 1,261 (2016-12-16T14:29:30.000Z → 2022-03-30T20:21:25.000Z) |
| max_issues_count | 190 (2016-12-17T13:44:09.000Z → 2022-03-27T09:28:13.000Z) |
| max_forks_count | 155 (2016-12-16T22:17:20.000Z → 2022-02-16T20:53:59.000Z) |
`content` (indentation restored; the KOP check is nested inside the AVX512 branch so that AVX512 isa-sets do not also match the later `elif 'AVX'` test):

```python
#!/usr/bin/env python
# -*- python -*-
#BEGIN_LEGAL
#
#Copyright (c) 2019 Intel Corporation
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
#END_LEGAL

from __future__ import print_function
import re

import genutil
import codegen


def _emit_function(fe, isa_sets, name):
    fo = codegen.function_object_t('xed_classify_{}'.format(name))
    fo.add_arg('const xed_decoded_inst_t* d')
    fo.add_code_eol('    const xed_isa_set_enum_t isa_set = xed_decoded_inst_get_isa_set(d)')
    # FIXME: 2017-07-14 optimization: could use a static array for faster checking, smaller code
    switch = codegen.c_switch_generator_t('isa_set', fo)
    isa_sets_sorted = sorted(isa_sets)
    for c in isa_sets_sorted:
        switch.add_case('XED_ISA_SET_{}'.format(c.upper()),[],do_break=False)
    if len(isa_sets) > 0:
        switch.add('return 1;')
    switch.add_default(['return 0;'], do_break=False)
    switch.finish()
    fo.emit_file_emitter(fe)


def work(agi):
    sse_isa_sets = set([])
    avx_isa_sets = set([])
    avx512_isa_sets = set([])
    avx512_kmask_op = set([])
    for generator in agi.generator_list:
        for ii in generator.parser_output.instructions:
            if genutil.field_check(ii, 'iclass'):
                if re.search('AVX512',ii.isa_set):
                    avx512_isa_sets.add(ii.isa_set)
                    if re.search('KOP',ii.isa_set):
                        avx512_kmask_op.add(ii.isa_set)
                elif re.search('AVX',ii.isa_set) or ii.isa_set in ['F16C', 'FMA']:
                    avx_isa_sets.add(ii.isa_set)
                elif re.search('SSE',ii.isa_set) or ii.isa_set in ['AES','PCLMULQDQ']:
                    # Exclude MMX instructions that come in with SSE2 &
                    # SSSE3. The several purely MMX instr in SSE are
                    # "SSE-opcodes" with memop operands. One can look for
                    # those with SSE2MMX and SSSE3MMX xed isa_sets.
                    #
                    # Also exclude the SSE_PREFETCH operations; Those are
                    # just memops.
                    if (not re.search('MMX',ii.isa_set) and not re.search('PREFETCH',ii.isa_set)
                        and not re.search('X87',ii.isa_set) and not re.search('MWAIT',ii.isa_set)):
                        sse_isa_sets.add(ii.isa_set)

    fe = agi.open_file('xed-classifiers.c') # xed_file_emitter_t
    _emit_function(fe, avx512_isa_sets, 'avx512')
    _emit_function(fe, avx512_kmask_op, 'avx512_maskop')
    _emit_function(fe, avx_isa_sets, 'avx')
    _emit_function(fe, sse_isa_sets, 'sse')
    fe.close()
    return
```
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 39.551282 | max_line_length | 96 | alphanum_fraction | 0.647326 |
| code_num_words | 454 | code_num_chars | 3,085 | code_mean_word_length | 4.167401 |
| code_frac_words_unique | 0.396476 | code_frac_chars_top_2grams | 0.060254 | code_frac_chars_top_3grams | 0.059197 |
| code_frac_chars_top_4grams | 0.023256 | code_frac_chars_dupe_5grams | 0.103066 | code_frac_chars_dupe_6grams | 0.103066 |
| code_frac_chars_dupe_7grams | 0.080338 | code_frac_chars_dupe_8grams | 0.021142 | code_frac_chars_digital | 0.023226 |
| code_frac_chars_whitespace | 0.246353 | code_size_file_byte | 3,085 | code_num_lines | 77 |
| code_num_chars_line_max | 97 | code_num_chars_line_mean | 40.064935 | code_frac_chars_alphabet | 0.790538 |
| code_frac_chars_comments | 0.322204 | code_frac_chars_string_length | 0.122152 | code_frac_chars_long_word_length | 0.015027 |
| code_frac_lines_prompt_comments | 0.012987 | codepython_cate_ast | 1 | codepython_frac_lines_func_ratio | 0.047619 |
| codepython_frac_lines_import | 0.095238 | codepython_score_lines_no_logic | 0.166667 | codepython_frac_lines_print | 0.02381 |

Filter flags: only `qsc_code_frac_lines_prompt_comments` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 4 (star, issue, and fork counts and event datetimes are all null):

| field | value |
|---|---|
| hexsha | 07e5b14fe954fccf9ada38a8fb44f9dd227c6830 |
| size | 1,301 |
| ext / lang | py / Python |
| repo_path | tests/web/config.py |
| repo_name | zcqian/biothings.api |
| repo_head_hexsha | 61c0300317cf2ac7db8310b5b5741ad9b08c4163 |
| repo_licenses | ["Apache-2.0"] |
"""
Web settings to override for testing.
"""
import os
from biothings.web.settings.default import QUERY_KWARGS
# *****************************************************************************
# Elasticsearch Variables
# *****************************************************************************
ES_INDEX = 'bts_test'
ES_DOC_TYPE = 'gene'
ES_SCROLL_SIZE = 60
# *****************************************************************************
# User Input Control
# *****************************************************************************
# use a smaller size for testing
QUERY_KWARGS['GET']['facet_size']['default'] = 3
QUERY_KWARGS['GET']['facet_size']['max'] = 5
QUERY_KWARGS['POST']['q']['jsoninput'] = True
# *****************************************************************************
# Elasticsearch Query Builder
# *****************************************************************************
ALLOW_RANDOM_QUERY = True
ALLOW_NESTED_AGGS = True
USERQUERY_DIR = os.path.join(os.path.dirname(__file__), 'userquery')
# *****************************************************************************
# Endpoints Specifics
# *****************************************************************************
STATUS_CHECK = {
'id': '1017',
'index': 'bts_test',
'doc_type': '_all'
}
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 34.236842 | max_line_length | 79 | alphanum_fraction | 0.362798 |
| code_num_words | 92 | code_num_chars | 1,301 | code_mean_word_length | 4.858696 |
| code_frac_words_unique | 0.630435 | code_frac_chars_top_2grams | 0.098434 | code_frac_chars_top_3grams | 0.053691 |
| code_frac_chars_top_4grams | 0.085011 | code_frac_chars_dupe_5grams | 0.102908 | code_frac_chars_digital | 0.006717 |
| code_frac_chars_whitespace | 0.08455 | code_size_file_byte | 1,301 | code_num_lines | 37 |
| code_num_chars_line_max | 80 | code_num_chars_line_mean | 35.162162 | code_frac_chars_alphabet | 0.368598 |
| code_frac_chars_comments | 0.602613 | code_frac_chars_string_length | 0.206897 | codepython_cate_ast | 1 |
| codepython_frac_lines_import | 0.125 | codepython_score_lines_no_logic | 0.125 | | |

Filter flags: only `qsc_code_frac_chars_alphabet` is 1 (this file's alphabetic fraction is unusually low, thanks to the `# ***` banner comments); the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 5 (star, issue, and fork counts and event datetimes are all null):

| field | value |
|---|---|
| hexsha | 07ea3ff52f1fa71b79053f13390d47944be9bd66 |
| size | 499 |
| ext / lang | py / Python |
| repo_path | examples/mcp3xxx_mcp3002_single_ended_simpletest.py |
| repo_name | sommersoft/Adafruit_CircuitPython_MCP3xxx |
| repo_head_hexsha | 94088a7e2b30f1b34e8a5fd7076075d88aad460b |
| repo_licenses | ["MIT"] |
`content`:

```python
import busio
import digitalio
import board
import adafruit_mcp3xxx.mcp3002 as MCP
from adafruit_mcp3xxx.analog_in import AnalogIn

# create the spi bus
spi = busio.SPI(clock=board.SCK, MISO=board.MISO, MOSI=board.MOSI)

# create the cs (chip select)
cs = digitalio.DigitalInOut(board.D5)

# create the mcp object
mcp = MCP.MCP3002(spi, cs)

# create an analog input channel on pin 0
chan = AnalogIn(mcp, MCP.P0)

print("Raw ADC Value: ", chan.value)
print("ADC Voltage: " + str(chan.voltage) + "V")
```
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 23.761905 | max_line_length | 66 | alphanum_fraction | 0.747495 |
| code_num_words | 80 | code_num_chars | 499 | code_mean_word_length | 4.625 |
| code_frac_words_unique | 0.525 | code_frac_chars_top_2grams | 0.072973 | code_frac_chars_digital | 0.030445 |
| code_frac_chars_whitespace | 0.144289 | code_size_file_byte | 499 | code_num_lines | 20 |
| code_num_chars_line_max | 67 | code_num_chars_line_mean | 24.95 | code_frac_chars_alphabet | 0.836066 |
| code_frac_chars_comments | 0.216433 | code_frac_chars_string_length | 0.07513 | codepython_cate_ast | 1 |
| codepython_frac_lines_import | 0.454545 | codepython_score_lines_no_logic | 0.454545 | codepython_frac_lines_print | 0.181818 |

Filter flags: only `qsc_codepython_frac_lines_import` is 1 (nearly half the lines are imports); the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 6. Here the stars columns point at a different fork than the issues/forks columns, so the three groups are shown separately:

| field | value |
|---|---|
| hexsha | 07eb8c54a1c0d882798ebdd645e52dda754bb70e |
| size | 759 |
| ext / lang | py / Python |
| repo_path (all three groups) | glue/core/data_factories/tables.py |
| max_stars | repo rosteen/glue @ ed71979f8e0e41f993a2363b3b5a8f8c3167a130, ["BSD-3-Clause"], count 550 (2015-01-08T13:51:06.000Z → 2022-03-31T11:54:47.000Z) |
| max_issues | repo mmorys/glue @ b58ced518ba6f56c59a4e03ffe84afa47235e193, ["BSD-3-Clause"], count 1,362 (2015-01-03T19:15:52.000Z → 2022-03-30T13:23:11.000Z) |
| max_forks | repo mmorys/glue @ b58ced518ba6f56c59a4e03ffe84afa47235e193, ["BSD-3-Clause"], count 142 (2015-01-08T13:08:00.000Z → 2022-03-18T13:25:57.000Z) |
`content`:

```python
from glue.core.data_factories.helpers import has_extension
from glue.config import data_factory

__all__ = ['tabular_data']


@data_factory(label="ASCII Table",
              identifier=has_extension('csv txt tsv tbl dat '
                                       'csv.gz txt.gz tbl.bz '
                                       'dat.gz'),
              priority=1)
def tabular_data(path, **kwargs):
    from glue.core.data_factories.astropy_table import astropy_tabular_data
    from glue.core.data_factories.pandas import pandas_read_table
    for fac in [astropy_tabular_data, pandas_read_table]:
        try:
            return fac(path, **kwargs)
        except Exception:
            pass
    else:
        raise IOError("Could not parse file: %s" % path)
```
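`tabular_data` tries the astropy reader first and falls back to pandas, raising only if both fail. A hedged usage sketch (the loader entry point and file name here are assumptions, not shown in the record):

```python
# Because tabular_data is registered via @data_factory, glue's generic
# loader should dispatch on the file extension; it can also be called
# directly. "measurements.csv" is illustrative.
from glue.core.data_factories import load_data

data = load_data("measurements.csv")  # tries astropy, then pandas
```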
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 33 | max_line_length | 75 | alphanum_fraction | 0.613966 |
| code_num_words | 93 | code_num_chars | 759 | code_mean_word_length | 4.774194 |
| code_frac_words_unique | 0.516129 | code_frac_chars_top_2grams | 0.072072 | code_frac_chars_top_3grams | 0.081081 |
| code_frac_chars_top_4grams | 0.108108 | code_frac_chars_dupe_5grams | 0.168919 | code_frac_chars_digital | 0.001894 |
| code_frac_chars_whitespace | 0.304348 | code_size_file_byte | 759 | code_num_lines | 22 |
| code_num_chars_line_max | 76 | code_num_chars_line_mean | 34.5 | code_frac_chars_alphabet | 0.839015 |
| code_frac_chars_string_length | 0.123847 | codepython_cate_ast | 1 | codepython_frac_lines_func_ratio | 0.055556 |
| codepython_frac_lines_pass | 0.055556 | codepython_frac_lines_import | 0.222222 | codepython_score_lines_no_logic | 0.333333 |

Filter flags: only `qsc_codepython_frac_lines_pass` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 7 (issue and fork counts/datetimes are null):

| field | value |
|---|---|
| hexsha | 07ee95bf0289bb4f328ba250a0e725c6cb917270 |
| size | 2,073 |
| ext / lang | py / Python |
| repo_path | d00dfeed/analyses/print_sloc_per_soc.py |
| repo_name | rehosting/rehosting_sok |
| repo_head_hexsha | 499b625c8aa60020f311df97a6253820982f20d4 |
| repo_licenses | ["MIT"] |
| max_stars_count | 4 (2021-09-17T02:37:08.000Z → 2022-02-15T01:44:41.000Z) |
`content`:

```python
# External deps
import os, sys, json
from pathlib import Path
from typing import Dict, List

# Internal deps
os.chdir(sys.path[0])
sys.path.append("..")
import df_common as dfc
import analyses_common as ac

# Generated files directory
GEN_FILE_DIR = str(Path(__file__).resolve().parent.parent) + os.sep + "generated_files" # TODO: ugly parent.parent pathing
if os.path.exists(GEN_FILE_DIR):
    sys.path.append(GEN_FILE_DIR)
if os.path.exists(os.path.join(GEN_FILE_DIR, "sloc_cnt.py")):
    from sloc_cnt import DRIVER_NAME_TO_SLOC
else:
    print("Error: no SLOC file! Run \'df_analyze.py\' with \'--linux-src-dir\'")
    sys.exit(1)

if __name__ == "__main__":
    json_files = ac.argparse_and_get_files("Graph SLOC/SoC data")
    soc_sloc_by_arch: Dict[str, List[int]] = {}

    print("Gathering SLOC average by arch...")
    from graph_dd_sloc_by_arch import get_sloc_avg_and_list_by_arch
    cmp_by_arch = ac.build_dict_two_lvl_cnt(json_files, dfc.JSON_ARC, dfc.JSON_CMP_STR)
    avg_sloc_by_arch, sloc_list_by_arch = get_sloc_avg_and_list_by_arch(cmp_by_arch, verbose = False)

    # Collection
    print("Iterating DTBs/SoCs...")
    for dtb_json in json_files:
        with open(dtb_json) as json_file:
            data = json.load(json_file)
        soc_sloc = 0
        arch = data[dfc.JSON_ARC]
        cmp_strs = data[dfc.JSON_CMP_STR]

        # Total SLOC for this SoC
        for cmp_str in cmp_strs:
            driver_sloc = dfc.cmp_str_to_sloc(cmp_str)
            if not driver_sloc: # Closed-source driver
                driver_sloc = avg_sloc_by_arch[arch]
            soc_sloc += driver_sloc
            #print("{}: {}".format(cmp_str, driver_sloc))

        if arch not in soc_sloc_by_arch:
            soc_sloc_by_arch[arch] = []
        else:
            soc_sloc_by_arch[arch].append(soc_sloc)
        print("{} ({}): {}".format(dtb_json.split(os.sep)[-1], arch, soc_sloc))

    # Final stats
    ac.print_mean_median_std_dev_for_dict_of_lists(soc_sloc_by_arch,
        "\nSloc Per Soc, format: [arch : (mean, median, std_dev)]\n")
```
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 32.904762 | max_line_length | 122 | alphanum_fraction | 0.673903 |
| code_num_words | 328 | code_num_chars | 2,073 | code_mean_word_length | 3.893293 |
| code_frac_words_unique | 0.317073 | code_frac_chars_top_2grams | 0.065779 | code_frac_chars_top_3grams | 0.062647 |
| code_frac_chars_top_4grams | 0.050901 | code_frac_chars_dupe_5grams | 0.076742 | code_frac_chars_dupe_6grams | 0.050117 |
| code_frac_chars_dupe_7grams | 0.050117 | code_frac_chars_dupe_8grams | 0.050117 | code_frac_chars_dupe_9grams | 0.050117 |
| code_frac_chars_dupe_10grams | 0.050117 | code_frac_chars_digital | 0.002452 | code_frac_chars_whitespace | 0.213218 |
| code_size_file_byte | 2,073 | code_num_lines | 62 | code_num_chars_line_max | 123 |
| code_num_chars_line_mean | 33.435484 | code_frac_chars_alphabet | 0.780503 | code_frac_chars_comments | 0.095514 |
| code_frac_lines_dupe_lines | 0.04878 | code_frac_chars_string_length | 0.132045 | code_frac_lines_prompt_comments | 0.016129 |
| codepython_cate_ast | 1 | codepython_frac_lines_import | 0.170732 | codepython_score_lines_no_logic | 0.170732 |
| codepython_frac_lines_print | 0.121951 | | | | |

Filter flags: only `qsc_code_frac_lines_prompt_comments` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 8 (star, issue, and fork counts and event datetimes are all null):

| field | value |
|---|---|
| hexsha | 07f12eb8f08aef21196193b3111071cb20b8013a |
| size | 1,884 |
| ext / lang | py / Python |
| repo_path | silver_bullet/crypto.py |
| repo_name | Hojung-Jeong/Silver-Bullet-Encryption-Tool |
| repo_head_hexsha | 5ea29b3cd78cf7488e0cbdcf4ea60d7c9151c2a7 |
| repo_licenses | ["Apache-2.0"] |
`content`:

```python
'''
>List of functions

1. encrypt(user_input,passphrase) - Encrypt the given string with the given passphrase. Returns cipher text and locked pad.
2. decrypt(cipher_text,locked_pad,passphrase) - Decrypt the cipher text encrypted with SBET. It requires cipher text, locked pad, and passphrase.
'''

# CODE ========================================================================

import zlib
import random
from hashlib import sha1

from silver_bullet.TRNG import trlist
from silver_bullet.contain_value import contain

ascii_value=256

def ciphering(target_list,pad,decrypt=False):
    result=[]
    for counter in range(len(pad)):
        if decrypt==False:
            operated=contain(target_list[counter]+pad[counter],ascii_value)
        else:
            operated=contain(int(target_list[counter])-pad[counter],ascii_value)
        result.append(operated)
    return result

def locker(pad,passphrase):
    cutter=round(len(passphrase)/2)
    splited=[passphrase[:cutter],passphrase[cutter:]]

    locker=[0 for counter in range(len(pad))]
    for element in splited:
        bloated_seed=sha1(element.encode()).hexdigest()
        random.seed(bloated_seed)
        locker=[contain(random.randrange(ascii_value)+element,ascii_value) for element in locker]

    holder=[]
    for counter in range(len(pad)):
        operated=int(pad[counter])^locker[counter]
        holder.append(operated)
    return holder

def encrypt(user_input,passphrase):
    compressed=zlib.compress(user_input.encode())
    ui_listed=list(compressed)

    pad=trlist(len(ui_listed),ascii_value)
    ct=ciphering(ui_listed,pad)
    lp=locker(pad,passphrase)

    cipher_text=' '.join(map(str,ct))
    locked_pad=' '.join(map(str,lp))
    return cipher_text, locked_pad

def decrypt(cipher_text,locked_pad,passphrase):
    ct=cipher_text.split(' ')
    lp=locked_pad.split(' ')

    pad=locker(lp,passphrase)
    pt=ciphering(ct,pad,True)

    byted=bytes(pt)
    decompressed=zlib.decompress(byted).decode()
    return decompressed
```
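The module compresses the plaintext, XOR-locks a one-time pad with a passphrase-seeded stream, and reverses both steps on decryption. A round-trip sketch based on the module's own docstring (it assumes the `silver_bullet` package, which provides `TRNG.trlist` and `contain_value.contain`, is importable):

```python
# Hedged usage example; the passphrase and message are made up.
from silver_bullet import crypto

cipher_text, locked_pad = crypto.encrypt('attack at dawn', 'correct horse')
assert crypto.decrypt(cipher_text, locked_pad, 'correct horse') == 'attack at dawn'
```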
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 24.789474 | max_line_length | 146 | alphanum_fraction | 0.735669 |
| code_num_words | 260 | code_num_chars | 1,884 | code_mean_word_length | 5.215385 |
| code_frac_words_unique | 0.323077 | code_frac_chars_top_2grams | 0.058997 | code_frac_chars_top_3grams | 0.047198 |
| code_frac_chars_top_4grams | 0.056047 | code_frac_chars_dupe_5grams | 0.158555 | code_frac_chars_dupe_6grams | 0.158555 |
| code_frac_chars_dupe_7grams | 0.054572 | code_frac_chars_digital | 0.00536 | code_frac_chars_whitespace | 0.108811 |
| code_size_file_byte | 1,884 | code_num_lines | 76 | code_num_chars_line_max | 147 |
| code_num_chars_line_mean | 24.789474 | code_frac_chars_alphabet | 0.802263 | code_frac_chars_comments | 0.213907 |
| code_frac_lines_dupe_lines | 0.044444 | code_frac_chars_string_length | 0.002653 | codepython_cate_ast | 1 |
| codepython_frac_lines_func_ratio | 0.088889 | codepython_frac_lines_pass | 0.155556 | codepython_frac_lines_import | 0.111111 |
| codepython_score_lines_no_logic | 0.288889 | | | | |

Filter flags: only `qsc_codepython_frac_lines_pass` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 9 (star, issue, and fork counts and event datetimes are all null):

| field | value |
|---|---|
| hexsha | 07f21adedf8ef7aa0ba52361a9cf4372ad43ac9a |
| size | 4,967 |
| ext / lang | py / Python |
| repo_path | app/nextMoveLogic.py |
| repo_name | thekitbag/starter-snake-python |
| repo_head_hexsha | 48d12d2fa61ecfc976cd5750316b1db49a641f7f |
| repo_licenses | ["MIT"] |
`content`:

```python
import random

class Status(object):

    def getHeadPosition(gamedata):
        me = gamedata['you']
        my_position = me['body']
        head = my_position[0]
        return head

    def getMyLength(gamedata):
        me = gamedata['you']
        my_position = me['body']
        if my_position[0] == my_position[1] == my_position[2]:
            return 1
        elif my_position[1] == my_position[2]:
            return 2
        else: return len(my_position)

    def getMyDirection(gamedata):
        me = gamedata['you']
        my_position = me['body']
        if Status.getMyLength(gamedata) == 1:
            return 'none'
        elif my_position[0]['x'] > my_position[1]['x']:
            return 'right'
        elif my_position[0]['x'] < my_position[1]['x']:
            return 'left'
        elif my_position[0]['x'] == my_position[1]['x'] and my_position[0]['y'] < my_position[1]['y']:
            return 'up'
        else: return 'down'

    def getHealth(gamedata):
        pass

    def getBoardSize(gamedata):
        board_height = gamedata['board']['height']
        board_width = gamedata['board']['width']
        dimensions = {'height': board_height, 'width': board_width}
        return dimensions

    def getFoodPositions(gamedata):
        pass

    def getSnakesPositions(gamedata):
        pass

class Assess(object):

    def wallProximity(gamedata):
        """returns proximity to a wall
        either parallel to, head-on or corner"""
        head = Status.getHeadPosition(gamedata)
        board_size = Status.getBoardSize(gamedata)
        direction = Status.getMyDirection(gamedata)
        height = board_size['height'] - 1
        width = board_size['width'] - 1
        #corners
        if head['x'] == 0 and head['y'] == 0:
            return {'type': 'corner', 'identifier': 'top left', 'direction': direction}
        elif head['x'] == 0 and head['y'] == height:
            return {'type': 'corner', 'identifier': 'bottom left', 'direction': direction}
        elif head['x'] == width and head['y'] == 0:
            return {'type': 'corner', 'identifier': 'top right', 'direction': direction}
        elif head['x'] == width and head['y'] == height:
            return {'type': 'corner', 'identifier': 'bottom right', 'direction': direction}
        #headons
        elif head['x'] == 0 and direction == 'left':
            return {'type': 'head-on', 'identifier': 'left', 'direction': direction}
        elif head['y'] == 0 and direction == 'up':
            return {'type': 'head-on', 'identifier': 'top', 'direction': direction}
        elif head['x'] == width and direction == 'right':
            return {'type': 'head-on', 'identifier': 'right', 'direction': direction}
        elif head['y'] == height and direction == 'down':
            return {'type': 'head-on', 'identifier': 'bottom', 'direction': direction}
        #parrallels
        elif head['x'] == 0 and direction == 'up' or head['x'] == 0 and direction == 'down':
            return {'type': 'parallel', 'identifier': 'left', 'direction': direction}
        elif head['y'] == 0 and direction == 'right' or head['y'] == 0 and direction =='left':
            return {'type': 'parallel', 'identifier': 'top', 'direction': direction}
        elif head['x'] == width and direction =='down' or head['x'] == width and direction == 'up':
            return {'type': 'parallel', 'identifier': 'right', 'direction': direction}
        elif head['y'] == height and direction == 'left' or head['y'] == height and direction == 'right':
            return {'type': 'parallel', 'identifier': 'bottom', 'direction': direction}
        else: return False

    def ownBodyProximity(gamedata):
        pass

    def killPossible(gamedata):
        pass

    def smallerSnakeNearby(gamedata):
        pass

    def biggerSnakeNearby(gamedata):
        pass

    def foodNearby(gamedata):
        pass

class Action(object):

    def avoidDeath():
        pass

    def chaseFood():
        pass

    def fleeSnake():
        pass

    def chaseSnake():
        pass

class Decision(object):

    def chooseBestOption(gamedata):
        options = ['up', 'down', 'right', 'left']
        current_direction = Status.getMyDirection(gamedata)
        #first go
        if current_direction == 'none':
            choice = random.choice(options)
        #remove opposite direction
        if current_direction == 'up':
            options.remove('down')
        if current_direction == 'down':
            options.remove('up')
        if current_direction == 'right':
            options.remove('left')
        if current_direction == 'left':
            options.remove('right')
        #no danger keep going
        if Assess.wallProximity(gamedata) == False:
            choice = current_direction
        #in a corner
        elif Assess.wallProximity(gamedata)['type'] == 'corner':
            options.remove(current_direction)
            if Assess.wallProximity(gamedata)['identifier'][0] == 't' and Assess.wallProximity(gamedata)['identifier'][4] == 'l':
                if 'up' in options:
                    choice = 'down'
                else: choice = 'right'
            elif Assess.wallProximity(gamedata)['identifier'][0] == 't' and Assess.wallProximity(gamedata)['identifier'][4] == 'r':
                if 'up' in options:
                    choice = 'down'
                else: choice = 'left'
        #headon
        elif Assess.wallProximity(gamedata)['type'] == 'head-on':
            options.remove(current_direction)
            choice = random.choice(options)
        #parallel
        elif Assess.wallProximity(gamedata)['type'] == 'parallel':
            choice = current_direction
        else: print("shit")
        print(options)
        return choice
```
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 29.217647 | max_line_length | 122 | alphanum_fraction | 0.655124 |
| code_num_words | 604 | code_num_chars | 4,967 | code_mean_word_length | 5.32947 |
| code_frac_words_unique | 0.155629 | code_frac_chars_top_2grams | 0.055918 | code_frac_chars_top_3grams | 0.06151 |
| code_frac_chars_top_4grams | 0.072693 | code_frac_chars_dupe_5grams | 0.462255 | code_frac_chars_dupe_6grams | 0.370923 |
| code_frac_chars_dupe_7grams | 0.331469 | code_frac_chars_dupe_8grams | 0.314073 | code_frac_chars_dupe_9grams | 0.282075 |
| code_frac_chars_dupe_10grams | 0.177074 | code_frac_chars_digital | 0.008033 | code_frac_chars_whitespace | 0.172941 |
| code_size_file_byte | 4,967 | code_num_lines | 169 | code_num_chars_line_max | 123 |
| code_num_chars_line_mean | 29.390533 | code_frac_chars_alphabet | 0.77556 | code_frac_chars_comments | 0.034629 |
| code_frac_lines_dupe_lines | 0.227642 | code_frac_chars_string_length | 0.155635 | codepython_cate_ast | 1 |
| codepython_frac_lines_func_ratio | 0.146341 | codepython_frac_lines_pass | 0.097561 | codepython_frac_lines_import | 0.00813 |
| codepython_score_lines_no_logic | 0.357724 | codepython_frac_lines_print | 0.01626 | | |

Filter flags: only `qsc_codepython_frac_lines_pass` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 10 (star, issue, and fork counts and event datetimes are all null):

| field | value |
|---|---|
| hexsha | 07fb390e2fe8908e8e3a429d629ca30f1d77df66 |
| size | 11,225 |
| ext / lang | py / Python |
| repo_path | test/test_python_errors.py |
| repo_name | yangyangxcf/parso |
| repo_head_hexsha | e496b07b6342f6182225a60aad6031d7ad08f24d |
| repo_licenses | ["PSF-2.0"] |
"""
Testing if parso finds syntax errors and indentation errors.
"""
import sys
import warnings
import pytest
import parso
from parso._compatibility import is_pypy
from .failing_examples import FAILING_EXAMPLES, indent, build_nested
if is_pypy:
# The errors in PyPy might be different. Just skip the module for now.
pytestmark = pytest.mark.skip()
def _get_error_list(code, version=None):
grammar = parso.load_grammar(version=version)
tree = grammar.parse(code)
return list(grammar.iter_errors(tree))
def assert_comparison(code, error_code, positions):
errors = [(error.start_pos, error.code) for error in _get_error_list(code)]
assert [(pos, error_code) for pos in positions] == errors
@pytest.mark.parametrize('code', FAILING_EXAMPLES)
def test_python_exception_matches(code):
wanted, line_nr = _get_actual_exception(code)
errors = _get_error_list(code)
actual = None
if errors:
error, = errors
actual = error.message
assert actual in wanted
# Somehow in Python3.3 the SyntaxError().lineno is sometimes None
assert line_nr is None or line_nr == error.start_pos[0]
def test_non_async_in_async():
"""
This example doesn't work with FAILING_EXAMPLES, because the line numbers
are not always the same / incorrect in Python 3.8.
"""
if sys.version_info[:2] < (3, 5):
pytest.skip()
# Raises multiple errors in previous versions.
code = 'async def foo():\n def nofoo():[x async for x in []]'
wanted, line_nr = _get_actual_exception(code)
errors = _get_error_list(code)
if errors:
error, = errors
actual = error.message
assert actual in wanted
if sys.version_info[:2] < (3, 8):
assert line_nr == error.start_pos[0]
else:
assert line_nr == 0 # For whatever reason this is zero in Python 3.8+
@pytest.mark.parametrize(
('code', 'positions'), [
('1 +', [(1, 3)]),
('1 +\n', [(1, 3)]),
('1 +\n2 +', [(1, 3), (2, 3)]),
('x + 2', []),
('[\n', [(2, 0)]),
('[\ndef x(): pass', [(2, 0)]),
('[\nif 1: pass', [(2, 0)]),
('1+?', [(1, 2)]),
('?', [(1, 0)]),
('??', [(1, 0)]),
('? ?', [(1, 0)]),
('?\n?', [(1, 0), (2, 0)]),
('? * ?', [(1, 0)]),
('1 + * * 2', [(1, 4)]),
('?\n1\n?', [(1, 0), (3, 0)]),
]
)
def test_syntax_errors(code, positions):
assert_comparison(code, 901, positions)
@pytest.mark.parametrize(
('code', 'positions'), [
(' 1', [(1, 0)]),
('def x():\n 1\n 2', [(3, 0)]),
('def x():\n 1\n 2', [(3, 0)]),
('def x():\n1', [(2, 0)]),
]
)
def test_indentation_errors(code, positions):
assert_comparison(code, 903, positions)
def _get_actual_exception(code):
with warnings.catch_warnings():
# We don't care about warnings where locals/globals misbehave here.
# It's as simple as either an error or not.
warnings.filterwarnings('ignore', category=SyntaxWarning)
try:
compile(code, '<unknown>', 'exec')
except (SyntaxError, IndentationError) as e:
wanted = e.__class__.__name__ + ': ' + e.msg
line_nr = e.lineno
except ValueError as e:
# The ValueError comes from byte literals in Python 2 like '\x'
# that are oddly enough not SyntaxErrors.
wanted = 'SyntaxError: (value error) ' + str(e)
line_nr = None
else:
assert False, "The piece of code should raise an exception."
# SyntaxError
# Python 2.6 has a bit different error messages here, so skip it.
if sys.version_info[:2] == (2, 6) and wanted == 'SyntaxError: unexpected EOF while parsing':
wanted = 'SyntaxError: invalid syntax'
if wanted == 'SyntaxError: non-keyword arg after keyword arg':
# The python 3.5+ way, a bit nicer.
wanted = 'SyntaxError: positional argument follows keyword argument'
elif wanted == 'SyntaxError: assignment to keyword':
return [wanted, "SyntaxError: can't assign to keyword",
'SyntaxError: cannot assign to __debug__'], line_nr
elif wanted == 'SyntaxError: assignment to None':
# Python 2.6 does has a slightly different error.
wanted = 'SyntaxError: cannot assign to None'
elif wanted == 'SyntaxError: can not assign to __debug__':
# Python 2.6 does has a slightly different error.
wanted = 'SyntaxError: cannot assign to __debug__'
elif wanted == 'SyntaxError: can use starred expression only as assignment target':
# Python 3.4/3.4 have a bit of a different warning than 3.5/3.6 in
# certain places. But in others this error makes sense.
return [wanted, "SyntaxError: can't use starred expression here"], line_nr
elif wanted == 'SyntaxError: f-string: unterminated string':
wanted = 'SyntaxError: EOL while scanning string literal'
elif wanted == 'SyntaxError: f-string expression part cannot include a backslash':
return [
wanted,
"SyntaxError: EOL while scanning string literal",
"SyntaxError: unexpected character after line continuation character",
], line_nr
elif wanted == "SyntaxError: f-string: expecting '}'":
wanted = 'SyntaxError: EOL while scanning string literal'
elif wanted == 'SyntaxError: f-string: empty expression not allowed':
wanted = 'SyntaxError: invalid syntax'
elif wanted == "SyntaxError: f-string expression part cannot include '#'":
wanted = 'SyntaxError: invalid syntax'
elif wanted == "SyntaxError: f-string: single '}' is not allowed":
wanted = 'SyntaxError: invalid syntax'
return [wanted], line_nr
def test_default_except_error_postition():
# For this error the position seemed to be one line off, but that doesn't
# really matter.
code = 'try: pass\nexcept: pass\nexcept X: pass'
wanted, line_nr = _get_actual_exception(code)
error, = _get_error_list(code)
assert error.message in wanted
assert line_nr != error.start_pos[0]
# I think this is the better position.
assert error.start_pos[0] == 2
def test_statically_nested_blocks():
def build(code, depth):
if depth == 0:
return code
new_code = 'if 1:\n' + indent(code)
return build(new_code, depth - 1)
def get_error(depth, add_func=False):
code = build('foo', depth)
if add_func:
code = 'def bar():\n' + indent(code)
errors = _get_error_list(code)
if errors:
assert errors[0].message == 'SyntaxError: too many statically nested blocks'
return errors[0]
return None
assert get_error(19) is None
assert get_error(19, add_func=True) is None
assert get_error(20)
assert get_error(20, add_func=True)
def test_future_import_first():
def is_issue(code, *args):
code = code % args
return bool(_get_error_list(code))
i1 = 'from __future__ import division'
i2 = 'from __future__ import absolute_import'
assert not is_issue(i1)
assert not is_issue(i1 + ';' + i2)
assert not is_issue(i1 + '\n' + i2)
assert not is_issue('"";' + i1)
assert not is_issue('"";' + i1)
assert not is_issue('""\n' + i1)
assert not is_issue('""\n%s\n%s', i1, i2)
assert not is_issue('""\n%s;%s', i1, i2)
assert not is_issue('"";%s;%s ', i1, i2)
assert not is_issue('"";%s\n%s ', i1, i2)
assert is_issue('1;' + i1)
assert is_issue('1\n' + i1)
assert is_issue('"";1\n' + i1)
assert is_issue('""\n%s\nfrom x import a\n%s', i1, i2)
assert is_issue('%s\n""\n%s', i1, i2)
def test_named_argument_issues(works_not_in_py):
message = works_not_in_py.get_error_message('def foo(*, **dict): pass')
message = works_not_in_py.get_error_message('def foo(*): pass')
if works_not_in_py.version.startswith('2'):
assert message == 'SyntaxError: invalid syntax'
else:
assert message == 'SyntaxError: named arguments must follow bare *'
works_not_in_py.assert_no_error_in_passing('def foo(*, name): pass')
works_not_in_py.assert_no_error_in_passing('def foo(bar, *, name=1): pass')
works_not_in_py.assert_no_error_in_passing('def foo(bar, *, name=1, **dct): pass')
def test_escape_decode_literals(each_version):
"""
We are using internal functions to assure that unicode/bytes escaping is
without syntax errors. Here we make a bit of quality assurance that this
works through versions, because the internal function might change over
time.
"""
def get_msg(end, to=1):
base = "SyntaxError: (unicode error) 'unicodeescape' " \
"codec can't decode bytes in position 0-%s: " % to
return base + end
def get_msgs(escape):
return (get_msg('end of string in escape sequence'),
get_msg(r"truncated %s escape" % escape))
error, = _get_error_list(r'u"\x"', version=each_version)
assert error.message in get_msgs(r'\xXX')
error, = _get_error_list(r'u"\u"', version=each_version)
assert error.message in get_msgs(r'\uXXXX')
error, = _get_error_list(r'u"\U"', version=each_version)
assert error.message in get_msgs(r'\UXXXXXXXX')
error, = _get_error_list(r'u"\N{}"', version=each_version)
assert error.message == get_msg(r'malformed \N character escape', to=2)
error, = _get_error_list(r'u"\N{foo}"', version=each_version)
assert error.message == get_msg(r'unknown Unicode character name', to=6)
# Finally bytes.
error, = _get_error_list(r'b"\x"', version=each_version)
wanted = r'SyntaxError: (value error) invalid \x escape'
if sys.version_info >= (3, 0):
# The positioning information is only available in Python 3.
wanted += ' at position 0'
assert error.message == wanted
def test_too_many_levels_of_indentation():
assert not _get_error_list(build_nested('pass', 99))
assert _get_error_list(build_nested('pass', 100))
base = 'def x():\n if x:\n'
assert not _get_error_list(build_nested('pass', 49, base=base))
assert _get_error_list(build_nested('pass', 50, base=base))
@pytest.mark.parametrize(
'code', [
"f'{*args,}'",
r'f"\""',
r'f"\\\""',
r'fr"\""',
r'fr"\\\""',
r"print(f'Some {x:.2f} and some {y}')",
]
)
def test_valid_fstrings(code):
assert not _get_error_list(code, version='3.6')
@pytest.mark.parametrize(
('code', 'message'), [
("f'{1+}'", ('invalid syntax')),
(r'f"\"', ('invalid syntax')),
(r'fr"\"', ('invalid syntax')),
]
)
def test_invalid_fstrings(code, message):
"""
Some fstring errors are handled differntly in 3.6 and other versions.
Therefore check specifically for these errors here.
"""
error, = _get_error_list(code, version='3.6')
assert message in error.message
@pytest.mark.parametrize(
'code', [
"from foo import (\nbar,\n rab,\n)",
"from foo import (bar, rab, )",
]
)
def test_trailing_comma(code):
errors = _get_error_list(code)
assert not errors
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 34.860248 | max_line_length | 96 | alphanum_fraction | 0.625568 |
| code_num_words | 1,546 | code_num_chars | 11,225 | code_mean_word_length | 4.372574 |
| code_frac_words_unique | 0.207633 | code_frac_chars_top_2grams | 0.031953 | code_frac_chars_top_3grams | 0.035503 |
| code_frac_chars_top_4grams | 0.023669 | code_frac_chars_dupe_5grams | 0.383728 | code_frac_chars_dupe_6grams | 0.331953 |
| code_frac_chars_dupe_7grams | 0.29142 | code_frac_chars_dupe_8grams | 0.219379 | code_frac_chars_dupe_9grams | 0.203698 |
| code_frac_chars_dupe_10grams | 0.156805 | code_frac_chars_digital | 0.020049 | code_frac_chars_whitespace | 0.240178 |
| code_size_file_byte | 11,225 | code_num_lines | 321 | code_num_chars_line_max | 97 |
| code_num_chars_line_mean | 34.968847 | code_frac_chars_alphabet | 0.772541 | code_frac_chars_comments | 0.132829 |
| code_frac_lines_dupe_lines | 0.173913 | code_frac_chars_string_length | 0.25599 | code_frac_lines_assert | 0.221739 |
| codepython_cate_ast | 1 | codepython_frac_lines_func_ratio | 0.091304 | codepython_frac_lines_pass | 0.052174 |
| codepython_frac_lines_import | 0.052174 | codepython_frac_lines_simplefunc | 0.004348 | codepython_score_lines_no_logic | 0.195652 |
| codepython_frac_lines_print | 0.004348 | | | | |

Filter flags: only `qsc_codepython_frac_lines_pass` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 11 (star, issue, and fork counts and event datetimes are all null):

| field | value |
|---|---|
| hexsha | 07fd108f6337b8e7a88da0155cf318b6098e4ae4 |
| size | 2,585 |
| ext / lang | py / Python |
| repo_path | src/grader/machine.py |
| repo_name | MrKaStep/csc230-grader |
| repo_head_hexsha | 559846f4d921c5c4be6b6e9ba8629fb24b448e41 |
| repo_licenses | ["MIT"] |
`content`:

```python
import getpass

from plumbum import local
from plumbum.machines.paramiko_machine import ParamikoMachine
from plumbum.path.utils import copy


def _once(f):
    res = None

    def wrapped(*args, **kwargs):
        nonlocal res
        if res is None:
            res = f(*args, **kwargs)
        return res

    return wrapped


@_once
def get_remote_machine_with_password(host, user):
    password = getpass.getpass(prompt=f"Password for {user}@{host}: ", stream=None)
    rem = ParamikoMachine(host, user=user, password=password)
    return rem


@_once
def get_remote_machine(host, user, keyfile):
    rem = ParamikoMachine(host, user=user, keyfile=keyfile)
    return rem


def get_local_machine():
    return local


def with_machine_rule(cls):
    old_init = cls.__init__

    def new_init(self, config):
        if "machine" not in config:
            machine_type = "local"
        else:
            machine_type = config["machine"]["type"]
        if machine_type == "local":
            self.machine = get_local_machine()
            self.files_to_copy = None
        elif machine_type == "remote":
            if "keyfile" in config["machine"]:
                self.machine = get_remote_machine(config["machine"]["host"], config["machine"]["user"], config["machine"]["keyfile"])
            else:
                self.machine = get_remote_machine_with_password(config["machine"]["host"], config["machine"]["user"])
            self.files_to_copy = config["machine"].get("files_to_copy")
        else:
            raise ValueError(f"Invalid machine type: {config['machine']['type']}")
        self.machine_type = machine_type
        old_init(self, config)

    cls.__init__ = new_init

    old_apply = cls.apply

    def new_apply(self, project):
        with self.machine.tempdir() as tempdir:
            project_path = tempdir / "project"
            project_path.mkdir()
            existing_files = set([f.name for f in project.root.list()])
            if self.files_to_copy:
                for fname in self.files_to_copy:
                    if fname in existing_files:
                        copy(project.root / fname, project_path / fname)
            else:
                for f in project.files():
                    if f.name in existing_files:
                        copy(f.path, project_path / f.name)
            with self.machine.cwd(project_path):
                self.session = self.machine.session()
                self.session.run(f"cd {project_path}")
                return old_apply(self, project)

    cls.apply = new_apply
    return cls
```
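`with_machine_rule` is a class decorator: it wraps `__init__` to attach a local or remote plumbum machine from `config["machine"]`, and wraps `apply` so the rule runs inside a temp directory populated with the project's files. A hedged usage sketch; the rule class, import path, host, and file names below are hypothetical:

```python
# Hypothetical consumer of the decorator above.
from grader.machine import with_machine_rule  # import path is an assumption


@with_machine_rule
class CompileRule:
    def __init__(self, config):
        self.config = config

    def apply(self, project):
        # By the time this runs, new_apply has copied the project into a
        # temp dir on self.machine and opened self.session there.
        return self.session.run("make")


rule = CompileRule({"machine": {"type": "remote",
                                "host": "remote.example.edu",
                                "user": "grader",
                                "keyfile": "~/.ssh/id_rsa",
                                "files_to_copy": ["Makefile", "main.c"]}})
```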
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 32.3125 | max_line_length | 133 | alphanum_fraction | 0.600387 |
| code_num_words | 311 | code_num_chars | 2,585 | code_mean_word_length | 4.787781 |
| code_frac_words_unique | 0.215434 | code_frac_chars_top_2grams | 0.087307 | code_frac_chars_top_3grams | 0.036938 |
| code_frac_chars_top_4grams | 0.040296 | code_frac_chars_dupe_5grams | 0.20685 | code_frac_chars_dupe_6grams | 0.045668 |
| code_frac_chars_whitespace | 0.292843 | code_size_file_byte | 2,585 | code_num_lines | 79 |
| code_num_chars_line_max | 134 | code_num_chars_line_mean | 32.721519 | code_frac_chars_alphabet | 0.814551 |
| code_frac_lines_dupe_lines | 0.125 | code_frac_chars_string_length | 0.087848 | code_frac_chars_long_word_length | 0.010449 |
| codepython_cate_ast | 1 | codepython_frac_lines_func_ratio | 0.125 | codepython_frac_lines_pass | 0.078125 |
| codepython_frac_lines_import | 0.0625 | codepython_frac_lines_simplefunc | 0.015625 | codepython_score_lines_no_logic | 0.296875 |

Filter flags: only `qsc_codepython_frac_lines_pass` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 12:

| field | value |
|---|---|
| hexsha | 07ff0da6e717ab9585c2e512803b8604ff985d37 |
| size | 2,793 |
| ext / lang | py / Python |
| repo_path | tests/test_tree.py |
| repo_name | andreax79/airflow-code-editor |
| repo_head_hexsha | 031170387496bbc6d540179c6c2f1765e1e70694 |
| repo_licenses | ["Apache-2.0"] |
| max_stars_count | 194 (2019-08-06T13:03:11.000Z → 2022-03-25T15:29:29.000Z) |
| max_issues_count | 29 (2019-08-23T16:07:17.000Z → 2022-03-31T03:43:47.000Z) |
| max_forks_count | 32 (2019-08-15T12:13:37.000Z → 2022-03-31T17:27:24.000Z) |
`content`:

```python
#!/usr/bin/env python

import os
import os.path
import airflow
import airflow.plugins_manager
from airflow import configuration
from flask import Flask
from unittest import TestCase, main
from airflow_code_editor.commons import PLUGIN_NAME
from airflow_code_editor.tree import (
    get_tree,
)

assert airflow.plugins_manager
app = Flask(__name__)


class TestTree(TestCase):
    def setUp(self):
        self.root_dir = os.path.dirname(os.path.realpath(__file__))
        configuration.conf.set(PLUGIN_NAME, 'git_init_repo', 'False')
        configuration.conf.set(PLUGIN_NAME, 'root_directory', self.root_dir)

    def test_tree(self):
        with app.app_context():
            t = get_tree()
            self.assertTrue(len(t) > 0)
            self.assertTrue('git' in (x['id'] for x in t))

    def test_tags(self):
        with app.app_context():
            t = get_tree("tags")
            self.assertIsNotNone(t)

    def test_local_branches(self):
        with app.app_context():
            t = get_tree("local-branches")
            self.assertIsNotNone(t)

    def test_remote_branches(self):
        with app.app_context():
            t = get_tree("remote-branches")
            self.assertIsNotNone(t)

    def test_files(self):
        with app.app_context():
            t = get_tree("files")
            self.assertTrue(
                len([x.get('id') for x in t if x.get('id') == 'test_utils.py']) == 1
            )
            t = get_tree("files/folder")
            self.assertTrue(len([x.get('id') for x in t if x.get('id') == '1']) == 1)

    def test_git(self):
        with app.app_context():
            t = get_tree("git/HEAD")
            self.assertTrue(t is not None)


class TestTreeGitDisabled(TestCase):
    def setUp(self):
        self.root_dir = os.path.dirname(os.path.realpath(__file__))
        configuration.conf.set(PLUGIN_NAME, 'git_init_repo', 'False')
        configuration.conf.set(PLUGIN_NAME, 'root_directory', self.root_dir)
        configuration.conf.set(PLUGIN_NAME, 'git_enabled', 'False')

    def test_tree(self):
        with app.app_context():
            t = get_tree()
            self.assertTrue(len(t) > 0)
            self.assertTrue('git' not in (x['id'] for x in t))
            t = get_tree("tags")
            self.assertEqual(t, [])
            t = get_tree("local-branches")
            self.assertEqual(t, [])
            t = get_tree("remote-branches")
            self.assertEqual(t, [])
            t = get_tree("files")
            self.assertTrue(
                len([x.get('id') for x in t if x.get('id') == 'test_utils.py']) == 1
            )
            t = get_tree("files/folder")
            self.assertTrue(len([x.get('id') for x in t if x.get('id') == '1']) == 1)


if __name__ == '__main__':
    main()
```
Quality signals (unlisted signals 0, `codepython_cate_var_zero` false):

| signal | value | signal | value | signal | value |
|---|---|---|---|---|---|
| avg_line_length | 31.382022 | max_line_length | 85 | alphanum_fraction | 0.583602 |
| code_num_words | 371 | code_num_chars | 2,793 | code_mean_word_length | 4.191375 |
| code_frac_words_unique | 0.191375 | code_frac_chars_top_2grams | 0.063023 | code_frac_chars_top_3grams | 0.066881 |
| code_frac_chars_top_4grams | 0.063023 | code_frac_chars_dupe_5grams | 0.736334 | code_frac_chars_dupe_6grams | 0.708682 |
| code_frac_chars_dupe_7grams | 0.612862 | code_frac_chars_dupe_8grams | 0.561415 | code_frac_chars_dupe_9grams | 0.510611 |
| code_frac_chars_dupe_10grams | 0.463023 | code_frac_chars_digital | 0.003982 | code_frac_chars_whitespace | 0.280702 |
| code_size_file_byte | 2,793 | code_num_lines | 88 | code_num_chars_line_max | 86 |
| code_num_chars_line_mean | 31.738636 | code_frac_chars_alphabet | 0.770035 | code_frac_chars_comments | 0.007161 |
| code_frac_lines_dupe_lines | 0.597222 | code_frac_chars_string_length | 0.090188 | code_frac_lines_assert | 0.222222 |
| codepython_cate_ast | 1 | codepython_frac_lines_func_ratio | 0.125 | codepython_frac_lines_import | 0.125 |
| codepython_score_lines_no_logic | 0.277778 | | | | |

Filter flags: only `qsc_code_frac_chars_dupe_6grams` is 1; the null-typed columns are null; `effective` = 0; `hits` = 1.
Row 13:

| field | value |
|---|---|
| hexsha | 07ff31219d3e42ddfa090b695c0d4b6ede8d31e9 |
| size | 2,826 |
| ext / lang | py / Python |
| repo_path | examples/token_freshness.py |
| repo_name | greenape/flask-jwt-extended |
| repo_head_hexsha | 11ac3bf0937ee199aea7d6dc47c748bef9bf1d2f |
| repo_licenses | ["MIT"] |
| max_stars_count | 2 (2021-03-20T01:55:08.000Z → 2021-11-14T12:20:23.000Z) |
| max_issues_count | 1 (2020-08-06T23:02:45.000Z → 2020-09-26T01:36:21.000Z) |
| max_forks_count | 1 (2020-10-28T20:09:00.000Z → 2020-10-28T20:09:00.000Z) |
from quart import Quart, jsonify, request
from quart_jwt_extended import (
    JWTManager,
    jwt_required,
    create_access_token,
    jwt_refresh_token_required,
    create_refresh_token,
    get_jwt_identity,
    fresh_jwt_required,
)

app = Quart(__name__)
app.config["JWT_SECRET_KEY"] = "super-secret"  # Change this!
jwt = JWTManager(app)


# Standard login endpoint. Will return a fresh access token and
# a refresh token
@app.route("/login", methods=["POST"])
async def login():
    username = (await request.get_json()).get("username", None)
    password = (await request.get_json()).get("password", None)
    if username != "test" or password != "test":
        return {"msg": "Bad username or password"}, 401

    # create_access_token supports an optional 'fresh' argument,
    # which marks the token as fresh or non-fresh accordingly.
    # As we just verified their username and password, we are
    # going to mark the token as fresh here.
    ret = {
        "access_token": create_access_token(identity=username, fresh=True),
        "refresh_token": create_refresh_token(identity=username),
    }
    return ret, 200


# Refresh token endpoint. This will generate a new access token from
# the refresh token, but will mark that access token as non-fresh,
# as we do not actually verify a password in this endpoint.
@app.route("/refresh", methods=["POST"])
@jwt_refresh_token_required
async def refresh():
    current_user = get_jwt_identity()
    new_token = create_access_token(identity=current_user, fresh=False)
    ret = {"access_token": new_token}
    return ret, 200


# Fresh login endpoint. This is designed to be used if we need to
# make a fresh token for a user (by verifying they have the
# correct username and password). Unlike the standard login endpoint,
# this will only return a new access token, so that we don't keep
# generating new refresh tokens, which entirely defeats their point.
@app.route("/fresh-login", methods=["POST"])
async def fresh_login():
    username = (await request.get_json()).get("username", None)
    password = (await request.get_json()).get("password", None)
    if username != "test" or password != "test":
        return {"msg": "Bad username or password"}, 401

    new_token = create_access_token(identity=username, fresh=True)
    ret = {"access_token": new_token}
    return ret, 200


# Any valid JWT can access this endpoint
@app.route("/protected", methods=["GET"])
@jwt_required
async def protected():
    username = get_jwt_identity()
    return dict(logged_in_as=username), 200


# Only fresh JWTs can access this endpoint
@app.route("/protected-fresh", methods=["GET"])
@fresh_jwt_required
async def protected_fresh():
    username = get_jwt_identity()
    return dict(fresh_logged_in_as=username), 200


if __name__ == "__main__":
    app.run()
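
# --------------------------------------------------------------------------
# Editor's sketch (not part of the original example): a minimal client that
# exercises the freshness flow end to end. Save it as a separate script; it
# assumes the app above is running on http://localhost:5000 and that the
# `requests` package is installed. Everything below is illustrative only.
# --------------------------------------------------------------------------
# import requests
#
# BASE = "http://localhost:5000"
#
# # /login returns a *fresh* access token plus a refresh token.
# tokens = requests.post(
#     f"{BASE}/login", json={"username": "test", "password": "test"}
# ).json()
# auth = {"Authorization": f"Bearer {tokens['access_token']}"}
# assert requests.get(f"{BASE}/protected", headers=auth).status_code == 200
# assert requests.get(f"{BASE}/protected-fresh", headers=auth).status_code == 200
#
# # /refresh returns a *non-fresh* token: /protected still works, but
# # /protected-fresh now rejects the request until /fresh-login is used.
# refresh_auth = {"Authorization": f"Bearer {tokens['refresh_token']}"}
# new_token = requests.post(f"{BASE}/refresh", headers=refresh_auth).json()[
#     "access_token"
# ]
# auth = {"Authorization": f"Bearer {new_token}"}
# assert requests.get(f"{BASE}/protected", headers=auth).status_code == 200
# assert requests.get(f"{BASE}/protected-fresh", headers=auth).status_code != 200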
| 33.247059
| 75
| 0.714084
| 398
| 2,826
| 4.89196
| 0.28392
| 0.067797
| 0.043657
| 0.039034
| 0.402671
| 0.327684
| 0.276323
| 0.237288
| 0.154083
| 0.154083
| 0
| 0.009044
| 0.178344
| 2,826
| 84
| 76
| 33.642857
| 0.829457
| 0.31564
| 0
| 0.288462
| 0
| 0
| 0.13309
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.115385
| 0.038462
| 0
| 0.173077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07ffdb3c18cae37c2fe662c5c84ed5398af39b35
| 1,345
|
py
|
Python
|
keras/linear/model/pipeline_train.py
|
PipelineAI/models
|
d8df07877aa8b10ce9b84983bb440af75e84dca7
|
[
"Apache-2.0"
] | 44
|
2017-11-17T06:19:05.000Z
|
2021-11-03T06:00:56.000Z
|
keras/linear/model/pipeline_train.py
|
PipelineAI/models
|
d8df07877aa8b10ce9b84983bb440af75e84dca7
|
[
"Apache-2.0"
] | 3
|
2018-08-09T14:28:17.000Z
|
2018-09-10T03:32:42.000Z
|
keras/linear/model/pipeline_train.py
|
PipelineAI/models
|
d8df07877aa8b10ce9b84983bb440af75e84dca7
|
[
"Apache-2.0"
] | 21
|
2017-11-18T15:12:12.000Z
|
2020-08-15T07:08:33.000Z
|
import os

os.environ['KERAS_BACKEND'] = 'theano'
os.environ['THEANO_FLAGS'] = 'floatX=float32,device=cpu'

import cloudpickle as pickle
import pipeline_invoke
import pandas as pd
import numpy as np
import keras
from keras.layers import Input, Dense
from keras.models import Model
from keras.models import save_model, load_model
from sklearn.preprocessing import StandardScaler, MinMaxScaler, Normalizer

if __name__ == '__main__':
    df = pd.read_csv("../input/training/training.csv")
    df["People per Television"] = pd.to_numeric(df["People per Television"], errors='coerce')
    df = df.dropna()

    x = df["People per Television"].values.reshape(-1, 1).astype(np.float64)
    y = df["People per Physician"].values.reshape(-1, 1).astype(np.float64)

    # min-max scale both series into (-1, 1). NOTE: the same scaler instance
    # is re-fit on y, so a later inverse_transform maps back to the y scale.
    sc = MinMaxScaler(feature_range=(-1, 1))
    x_ = sc.fit_transform(x)
    y_ = sc.fit_transform(y)

    inputs = Input(shape=(1,))
    preds = Dense(1, activation='linear')(inputs)
    model = Model(inputs=inputs, outputs=preds)

    sgd = keras.optimizers.SGD()
    model.compile(optimizer=sgd, loss='mse')
    model.fit(x_, y_, batch_size=1, verbose=1, epochs=10, shuffle=False)

    save_model(model, 'state/keras_theano_linear_model_state.h5')

    # model_pkl_path = 'model.pkl'
    # with open(model_pkl_path, 'wb') as fh:
    #     pickle.dump(pipeline_invoke, fh)
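
# Editor's sketch (not in the original script): verify the saved state can be
# reloaded and used for inference. Note the caveat above: `sc` was last fit
# on y, so inverse_transform maps predictions back to the y scale.
if __name__ == '__main__':
    restored = load_model('state/keras_theano_linear_model_state.h5')
    preds_scaled = restored.predict(x_)
    preds = sc.inverse_transform(preds_scaled)  # back to "People per Physician" units
    print(preds[:5])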
| 30.568182
| 92
| 0.709294
| 196
| 1,345
| 4.69898
| 0.464286
| 0.034745
| 0.047774
| 0.068404
| 0.065147
| 0.065147
| 0.065147
| 0
| 0
| 0
| 0
| 0.018437
| 0.15316
| 1,345
| 43
| 93
| 31.27907
| 0.790167
| 0.094424
| 0
| 0
| 0
| 0
| 0.191261
| 0.078318
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.357143
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
580134063c60e1903557dccde046d7a394258b01
| 319
|
py
|
Python
|
dictionary.py
|
SchmitzAndrew/OSS-101-example
|
1efecd4c5bfef4495904568d11e3f8d0a5ed9bd0
|
[
"MIT"
] | null | null | null |
dictionary.py
|
SchmitzAndrew/OSS-101-example
|
1efecd4c5bfef4495904568d11e3f8d0a5ed9bd0
|
[
"MIT"
] | null | null | null |
dictionary.py
|
SchmitzAndrew/OSS-101-example
|
1efecd4c5bfef4495904568d11e3f8d0a5ed9bd0
|
[
"MIT"
] | null | null | null |
word = input("Enter a word: ")
if word == "a":
    print("one; any")
elif word == "apple":
    print("familiar, round fleshy fruit")
elif word == "rhinoceros":
    print("large thick-skinned animal with one or two horns on its nose")
else:
    print("That word must not exist. This dictionary is very comprehensive.")
| 29
| 77
| 0.667712
| 47
| 319
| 4.531915
| 0.765957
| 0.075117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197492
| 319
| 10
| 78
| 31.9
| 0.832031
| 0
| 0
| 0
| 0
| 0
| 0.595611
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.444444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
58035ad02fa85d7c60de0ef4d5c14279175bc2ac
| 566
|
py
|
Python
|
setup.py
|
sdnhub/kube-navi
|
d16a9289ba7261011e6c8d19c48cdc9bd533e629
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
sdnhub/kube-navi
|
d16a9289ba7261011e6c8d19c48cdc9bd533e629
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
sdnhub/kube-navi
|
d16a9289ba7261011e6c8d19c48cdc9bd533e629
|
[
"Apache-2.0"
] | null | null | null |
from distutils.core import setup

setup(
    name='kube_navi',
    packages=['kube_navi'],  # this must be the same as the name above
    version='0.1',
    description='Kubernetes resource discovery toolkit',
    author='Srini Seetharaman',
    author_email='[email protected]',
    url='https://github.com/sdnhub/kube-navi',  # use the URL to the github repo
    download_url='https://github.com/sdnhub/kube-navi/archive/0.1.tar.gz',  # I'll explain this in a second
    keywords=['testing', 'logging', 'example'],  # arbitrary keywords
    classifiers=[],
)
| 40.428571
| 106
| 0.69788
| 79
| 566
| 4.949367
| 0.670886
| 0.081841
| 0.071611
| 0.086957
| 0.158568
| 0.158568
| 0.158568
| 0
| 0
| 0
| 0
| 0.008403
| 0.159011
| 566
| 13
| 107
| 43.538462
| 0.813025
| 0.210247
| 0
| 0
| 0
| 0.076923
| 0.479638
| 0.061086
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
5805a2c8d616906daf19682b40baa91f10a88715
| 1,845
|
py
|
Python
|
app/routes/register.py
|
AuFeld/COAG
|
3874a9c1c6ceb908a6bbabfb49e2c701d8e54f20
|
[
"MIT"
] | 1
|
2021-06-03T10:29:12.000Z
|
2021-06-03T10:29:12.000Z
|
app/routes/register.py
|
AuFeld/COAG
|
3874a9c1c6ceb908a6bbabfb49e2c701d8e54f20
|
[
"MIT"
] | 45
|
2021-06-05T14:47:09.000Z
|
2022-03-30T06:16:44.000Z
|
app/routes/register.py
|
AuFeld/COAG
|
3874a9c1c6ceb908a6bbabfb49e2c701d8e54f20
|
[
"MIT"
] | null | null | null |
from typing import Callable, Optional, Type, cast

from fastapi import APIRouter, HTTPException, Request, status

from app.models import users
from app.common.user import ErrorCode, run_handler
from app.users.user import (
    CreateUserProtocol,
    InvalidPasswordException,
    UserAlreadyExists,
    ValidatePasswordProtocol,
)


def get_register_router(
    create_user: CreateUserProtocol,
    user_model: Type[users.BaseUser],
    user_create_model: Type[users.BaseUserCreate],
    after_register: Optional[Callable[[users.UD, Request], None]] = None,
    validate_password: Optional[ValidatePasswordProtocol] = None,
) -> APIRouter:
    """Generate a router with the register route."""
    router = APIRouter()

    @router.post(
        "/register", response_model=user_model, status_code=status.HTTP_201_CREATED
    )
    async def register(request: Request, user: user_create_model):  # type: ignore
        user = cast(users.BaseUserCreate, user)  # Prevent mypy complaint
        if validate_password:
            try:
                await validate_password(user.password, user)
            except InvalidPasswordException as e:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail={
                        "code": ErrorCode.REGISTER_INVALID_PASSWORD,
                        "reason": e.reason,
                    },
                )
        try:
            created_user = await create_user(user, safe=True)
        except UserAlreadyExists:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ErrorCode.REGISTER_USER_ALREADY_EXISTS,
            )
        if after_register:
            await run_handler(after_register, created_user, request)
        return created_user

    return router
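
# Editor's sketch (hypothetical wiring, not part of this module): the factory
# returns a plain APIRouter, so an application mounts it like any other
# router. The `create_user` stand-in below is illustrative only; a real app
# passes its own CreateUserProtocol implementation.
if __name__ == "__main__":
    from fastapi import FastAPI

    async def create_user(user, safe=False):  # placeholder implementation
        return user

    demo_app = FastAPI()
    demo_app.include_router(
        get_register_router(create_user, users.BaseUser, users.BaseUserCreate),
        prefix="/auth",
        tags=["auth"],
    )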
| 32.946429
| 83
| 0.648238
| 184
| 1,845
| 6.293478
| 0.380435
| 0.018135
| 0.041451
| 0.051813
| 0.098446
| 0.098446
| 0.098446
| 0.098446
| 0.098446
| 0.098446
| 0
| 0.006767
| 0.279133
| 1,845
| 55
| 84
| 33.545455
| 0.86391
| 0.042276
| 0
| 0.133333
| 0
| 0
| 0.010795
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0.155556
| 0.111111
| 0
| 0.177778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6afebab1780e5e05d2dbd1b300b2e8c2a43c36a7
| 17,003
|
py
|
Python
|
apps/UI_phone_mcdm.py
|
industrial-optimization-group/researchers-night
|
68f2fcb8530032e157badda772a795e1f3bb2c4b
|
[
"MIT"
] | null | null | null |
apps/UI_phone_mcdm.py
|
industrial-optimization-group/researchers-night
|
68f2fcb8530032e157badda772a795e1f3bb2c4b
|
[
"MIT"
] | null | null | null |
apps/UI_phone_mcdm.py
|
industrial-optimization-group/researchers-night
|
68f2fcb8530032e157badda772a795e1f3bb2c4b
|
[
"MIT"
] | null | null | null |
import dash
from dash.exceptions import PreventUpdate
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
import dash_bootstrap_components as dbc
import dash_table
import plotly.express as ex
import plotly.graph_objects as go
import pandas as pd
import numpy as np
data = pd.read_csv("./data/Phone_dataset_new.csv", header=0)
details = pd.read_csv("./data/Phone_details.csv", header=0)
names = details.loc[0]
data = data.rename(columns=names)
details = details.rename(columns=names)
maxi = details.loc[1].astype(int)
details_on_card = details.loc[2].astype(int)
details_on_card = details.columns[details_on_card == 1]
fitness_columns = {
"Memory": -1,
"RAM": -1,
"Camera (MP)": -1,
"Price (Euros)": 1,
}
fitness_data = data[fitness_columns] * maxi[fitness_columns].values
external_stylesheets = ["https://codepen.io/chriddyp/pen/bWLwgP.css"]
app = dash.Dash(
__name__,
external_stylesheets=[dbc.themes.LITERA],
eager_loading=True,
suppress_callback_exceptions=True,
)
app.layout = html.Div(
children=[
# .container class is fixed, .container.scalable is scalable
dbc.Row(
[
dbc.Col(
html.H1(
children="What is your optimal phone?",
className="text-center mt-4",
)
)
]
),
dbc.Row(
[
dbc.Col(
children=[
# Top card with details(?)
dbc.Card(
children=[
dbc.CardBody(
[
html.H4(
"Researcher's Night Event",
className="card-title text-center",
),
html.P(
(
"This app uses decision support tools to "
"quickly and easily find phones which reflect "
"the user's desires. Input your preferences "
"below. The box on top right shows the phone "
"which matches the preferences the best. "
"The box on bottom right provides some "
"close alternatives."
),
className="card-text",
),
]
)
],
className="mr-3 ml-3 mb-2 mt-2",
),
dbc.Form(
[
dbc.FormGroup(
children=[
dbc.Label(
"Choose desired operating system",
html_for="os-choice",
),
dbc.RadioItems(
options=[
{
"label": "Android",
"value": "Android",
},
{"label": "iOS", "value": "IOS"},
{
"label": "No preference",
"value": "both",
},
],
id="os-choice",
value="both",
inline=True,
# className="text-center mt-4",
),
],
className="mr-3 ml-3 mb-2 mt-2",
),
dbc.FormGroup(
children=[
dbc.Label(
"Choose desired Memory capacity (GB)",
html_for="memory-choice",
),
dcc.Slider(
id="memory-choice",
min=16,
max=256,
step=None,
included=False,
value=256,
marks={
16: "16",
32: "32",
64: "64",
128: "128",
256: "256",
},
# className="text-center mt-5",
),
],
className="mr-3 ml-3 mb-2 mt-2",
),
dbc.FormGroup(
children=[
dbc.Label(
"Choose desired RAM capacity (GB)",
html_for="ram-choice",
),
dcc.Slider(
id="ram-choice",
min=2,
max=12,
step=1,
value=12,
included=False,
marks={
2: "2",
3: "3",
4: "4",
5: "5",
6: "6",
7: "7",
8: "8",
9: "9",
10: "10",
11: "11",
12: "12",
},
className="text-center mt-5",
),
],
className="mr-3 ml-3 mb-2 mt-2",
),
dbc.FormGroup(
children=[
dbc.Label(
"Choose desired camera resolution (MP)",
html_for="cam-choice",
),
dcc.Slider(
id="cam-choice",
min=0,
max=130,
step=1,
included=False,
value=70,
marks={
0: "0",
10: "10",
30: "30",
50: "50",
70: "70",
90: "90",
110: "110",
130: "130",
},
className="text-center mt-5",
),
],
className="mr-3 ml-3 mb-2 mt-2",
),
dbc.FormGroup(
children=[
dbc.Label(
"Choose desired budget (Euros)",
html_for="cost-choice",
),
dcc.Slider(
id="cost-choice",
min=0,
max=1400,
step=1,
included=False,
value=100,
marks={
0: "0",
200: "200",
400: "400",
600: "600",
800: "800",
1000: "1000",
1200: "1200",
1400: "1400",
},
className="text-center mt-5",
),
],
className="mr-3 ml-3 mb-2 mt-2",
),
],
style={"maxHeight": "560px", "overflow": "auto"},
),
],
width={"size": 5, "offset": 1},
),
dbc.Col(
children=[
dbc.Card(
children=[
dbc.CardHeader("The best phone for you is:"),
dbc.CardBody(id="results"),
],
className="mb-4",
),
dbc.Card(
children=[
dbc.CardHeader("Other great phones:"),
dbc.CardBody(
id="other-results",
children=(
[
html.P(
html.Span(
f"{i}. ",
id=f"other-results-list-{i}",
)
)
for i in range(2, 6)
]
+ [
dbc.Tooltip(
id=f"other-results-tooltip-{i}",
target=f"other-results-list-{i}",
placement="right",
style={
"maxWidth": 700,
"background-color": "white",
"color": "white",
"border-style": "solid",
"border-color": "black",
},
)
for i in range(2, 6)
]
),
),
],
className="mt-4",
),
html.Div(id="tooltips"),
],
width={"size": 5, "offset": 0},
className="mb-2 mt-2",
),
]
),
dbc.Row([html.Div(id="callback-dump")]),
],
)
@app.callback(
[
Output("results", "children"),
*[Output(f"other-results-list-{i}", "children") for i in range(2, 6)],
*[Output(f"other-results-tooltip-{i}", "children") for i in range(2, 6)],
],
[
Input(f"{attr}-choice", "value")
for attr in ["os", "memory", "ram", "cam", "cost"]
],
)
def results(*choices):
if choices[0] == "both":
choice_data = data
elif choices[0] == "IOS":
choice_data = data[[True if "IOS" in st else False for st in data["OS"]]]
if choices[0] == "Android":
choice_data = data[[True if "Android" in st else False for st in data["OS"]]]
relevant_data = choice_data[
["Memory", "RAM", "Camera (MP)", "Price (Euros)",]
].reset_index(drop=True)
card_data = choice_data[details_on_card].reset_index(drop=True)
maxi = np.asarray([-1, -1, -1, 1])
relevant_data = relevant_data * maxi
ideal = relevant_data.min().values
nadir = relevant_data.max().values
aspirations = choices[1:] * maxi
distance = (aspirations - relevant_data) / (ideal - nadir)
distance = distance.max(axis=1)
distance_order = np.argsort(distance)
best = table_from_data(card_data.loc[distance_order.values[0]], choices[1:])
total_number = len(distance_order)
if total_number >= 4:
others, tooltips = other_options(card_data.loc[distance_order.values[1:5]])
else:
others, tooltips = other_options(
card_data.loc[distance_order.values[1:total_number]]
)
others = others + [f"{i}. -" for i in range(len(others) + 2, 6)]
tooltips = tooltips + [None for i in range(len(tooltips) + 2, 6)]
return (best, *others, *tooltips)
"""@app.callback(Output("tooltips", "children"), [Input("callback-dump", "children")])
def tooltips(tooldict):
num = len(tooldict["ids"])
content = []
for i in range(num):
content.append(dbc.Tooltip(tooldict["tables"][i], target=tooldict["ids"][i]))
return content"""
def table_from_data(data, choices):
# print(choices)
to_compare = ["Memory", "RAM", "Camera (MP)", "Price (Euros)"]
# print(data[to_compare].values)
diff = (data[to_compare].values - choices) * [1, 1, 1, -1]
colors = [None, None, None] + ["green" if x >= 0 else "red" for x in diff]
# print(np.sign(diff))
return dbc.Table(
[
html.Tbody(
[
html.Tr(
[
html.Th(col),
html.Td([str(data[col]),],),
html.Td([html.Span(" ▉", style={"color": c,},)],),
]
)
for (col, c) in zip(data.index, colors)
]
)
]
)
def table_from_data_horizontal(data):
header = [html.Thead(html.Tr([html.Th(col) for col in data.index]))]
body = [html.Tbody([html.Tr([html.Td(data[col]) for col in data.index])])]
return dbc.Table(header + body)
def other_options(data):
contents = []
tables = []
ids = []
i = 2
for index, row in data.iterrows():
contents.append(f"{i}. {row['Model']}")
tables.append(table_from_data_horizontal(row))
i = i + 1
return contents, tables
if __name__ == "__main__":
app.run_server(debug=False)
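
# --------------------------------------------------------------------------
# Editor's sketch (standalone, not part of the app): the core of results()
# above is an achievement-scalarizing ranking; each phone is scored by its
# worst normalized shortfall from the user's aspiration levels. The numbers
# below are invented for illustration.
# --------------------------------------------------------------------------
import numpy as np

def rank_by_aspiration(values, aspiration, sense):
    """values: (n, k) criteria matrix; aspiration: (k,) desired levels;
    sense: (k,) with -1 to maximize, +1 to minimize (as in results())."""
    v = values * sense                                  # flip maximized criteria
    a = aspiration * sense
    ideal, nadir = v.min(axis=0), v.max(axis=0)
    distance = ((a - v) / (ideal - nadir)).max(axis=1)  # worst normalized shortfall
    return np.argsort(distance)                         # best alternative first

phones = np.array([[64, 4, 12, 300], [128, 6, 48, 550], [256, 12, 108, 1200]])
wish = np.array([128, 8, 48, 400])                      # Memory, RAM, Camera, Price
print(rank_by_aspiration(phones, wish, np.array([-1, -1, -1, 1])))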
| 43.485934
| 95
| 0.283597
| 1,115
| 17,003
| 4.24574
| 0.253812
| 0.020913
| 0.007393
| 0.008872
| 0.263836
| 0.17744
| 0.133502
| 0.124842
| 0.115547
| 0.104563
| 0
| 0.040494
| 0.628183
| 17,003
| 390
| 96
| 43.597436
| 0.708162
| 0.012351
| 0
| 0.309456
| 0
| 0
| 0.098339
| 0.010186
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011461
| false
| 0
| 0.031519
| 0
| 0.054441
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed0fc8cf4f946e650eb4b14f0a5d7690952a62a3
| 980
|
py
|
Python
|
python/old_password_test.py
|
XelaRellum/old_password
|
b461941069bc7f1187776a992f86c89317ab215e
|
[
"MIT"
] | null | null | null |
python/old_password_test.py
|
XelaRellum/old_password
|
b461941069bc7f1187776a992f86c89317ab215e
|
[
"MIT"
] | null | null | null |
python/old_password_test.py
|
XelaRellum/old_password
|
b461941069bc7f1187776a992f86c89317ab215e
|
[
"MIT"
] | null | null | null |
import unittest
import pytest
from old_password import old_password
import csv
import re


@pytest.mark.parametrize("password,expected_hash", [
    (None, None),
    ("", ""),
    ("a", "60671c896665c3fa"),
    ("abc", "7cd2b5942be28759"),
    ("ä", "0751368d49315f7f"),
])
def test_old_password(password, expected_hash):
    assert old_password(password) == expected_hash


def test_password_with_space():
    """
    spaces in password are skipped
    """
    assert old_password("pass word") == old_password("password")


def test_password_with_tab():
    """
    tabs in password are skipped
    """
    assert old_password("pass\tword") == old_password("password")


def test_password_from_testdata():
    with open("../testdata.csv", "r") as file:
        for line in file:
            line = line.strip()
            password, expected_hash = line.split(";")
            hash = old_password(password)
            assert hash == expected_hash, "password: %s" % password
| 22.272727
| 67
| 0.643878
| 112
| 980
| 5.419643
| 0.383929
| 0.163097
| 0.156507
| 0.088962
| 0.349259
| 0.247117
| 0.135091
| 0.135091
| 0
| 0
| 0
| 0.04712
| 0.220408
| 980
| 43
| 68
| 22.790698
| 0.747382
| 0.060204
| 0
| 0
| 0
| 0
| 0.15618
| 0.024719
| 0
| 0
| 0
| 0
| 0.16
| 1
| 0.16
| false
| 0.48
| 0.2
| 0
| 0.36
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ed17fa4c7a350d13f37c06feb06cdcd3b65f55bf
| 859
|
gyp
|
Python
|
binding.gyp
|
HupuInc/node-mysql-listener
|
d23e55910acd1559d8339f36b1549f21aee8adaa
|
[
"MIT"
] | 2
|
2015-10-04T02:09:11.000Z
|
2021-02-03T00:12:28.000Z
|
binding.gyp
|
HupuInc/node-mysql-listener
|
d23e55910acd1559d8339f36b1549f21aee8adaa
|
[
"MIT"
] | 1
|
2015-10-04T02:10:02.000Z
|
2015-10-05T07:29:40.000Z
|
binding.gyp
|
HupuInc/node-mysql-listener
|
d23e55910acd1559d8339f36b1549f21aee8adaa
|
[
"MIT"
] | null | null | null |
{
  'targets': [
    {
      # have to specify 'liblib' here since gyp will remove the first one :\
      'target_name': 'mysql_bindings',
      'sources': [
        'src/mysql_bindings.cc',
        'src/mysql_bindings_connection.cc',
        'src/mysql_bindings_result.cc',
        'src/mysql_bindings_statement.cc',
      ],
      'conditions': [
        ['OS=="win"', {
          # no Windows support yet...
        }, {
          'libraries': [
            '<!@(mysql_config --libs_r)'
          ],
        }],
        ['OS=="mac"', {
          # cflags on OS X are stupid and have to be defined like this
          'xcode_settings': {
            'OTHER_CFLAGS': [
              '<!@(mysql_config --cflags)'
            ]
          }
        }, {
          'cflags': [
            '<!@(mysql_config --cflags)'
          ],
        }]
      ]
    }
  ]
}
| 23.861111
| 76
| 0.436554
| 75
| 859
| 4.8
| 0.64
| 0.180556
| 0.177778
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.407451
| 859
| 35
| 77
| 24.542857
| 0.707269
| 0.178114
| 0
| 0.21875
| 0
| 0
| 0.424501
| 0.159544
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed1acc095f46eeb713b4bbe4bbc113d4ca38760c
| 399
|
py
|
Python
|
setup.py
|
rlbellaire/ActT
|
b6e936e5037c5f92ad1c281e2bf3700bf91aea42
|
[
"BSD-3-Clause"
] | 2
|
2020-01-24T20:20:02.000Z
|
2021-09-25T03:32:17.000Z
|
setup.py
|
rlbellaire/ActT
|
b6e936e5037c5f92ad1c281e2bf3700bf91aea42
|
[
"BSD-3-Clause"
] | 1
|
2020-11-16T17:08:08.000Z
|
2020-11-16T17:08:08.000Z
|
setup.py
|
rlbellaire/ActT
|
b6e936e5037c5f92ad1c281e2bf3700bf91aea42
|
[
"BSD-3-Clause"
] | 1
|
2020-11-16T16:58:39.000Z
|
2020-11-16T16:58:39.000Z
|
from setuptools import find_packages, setup

setup(name='ActT',
      version='0.6',
      description='Active Testing',
      url='',
      author='',
      author_email='none',
      license='BSD',
      packages=find_packages(),
      install_requires=[
          'numpy', 'pandas', 'matplotlib', 'scipy', 'scikit-learn', 'opencv-python',
          'statswag', 'tensorflow'
      ],
      zip_safe=True)
| 24.9375
| 79
| 0.588972
| 40
| 399
| 5.75
| 0.875
| 0.104348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006645
| 0.245614
| 399
| 15
| 80
| 26.6
| 0.757475
| 0
| 0
| 0
| 0
| 0
| 0.243108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed1b9e6a531c569f1a1cfb6234bd90d5b845bbe9
| 1,909
|
py
|
Python
|
src/quanguru/classes/exceptions.py
|
Qfabiolous/QuanGuru
|
285ca44ae857cc61337f73ea2eb600f485a09e32
|
[
"BSD-3-Clause"
] | null | null | null |
src/quanguru/classes/exceptions.py
|
Qfabiolous/QuanGuru
|
285ca44ae857cc61337f73ea2eb600f485a09e32
|
[
"BSD-3-Clause"
] | null | null | null |
src/quanguru/classes/exceptions.py
|
Qfabiolous/QuanGuru
|
285ca44ae857cc61337f73ea2eb600f485a09e32
|
[
"BSD-3-Clause"
] | null | null | null |
# TODO turn prints into actual error raises, they are prints for testing
def qSystemInitErrors(init):
    def newFunction(obj, **kwargs):
        init(obj, **kwargs)
        if obj._genericQSys__dimension is None:
            className = obj.__class__.__name__
            print(className + ' requires a dimension')
        elif obj.frequency is None:
            className = obj.__class__.__name__
            print(className + ' requires a frequency')
    return newFunction


def qCouplingInitErrors(init):
    def newFunction(obj, *args, **kwargs):
        init(obj, *args, **kwargs)
        if obj.couplingOperators is None:  # pylint: disable=protected-access
            className = obj.__class__.__name__
            print(className + ' requires coupling functions')
        elif obj.coupledSystems is None:  # pylint: disable=protected-access
            className = obj.__class__.__name__
            print(className + ' requires coupled systems')
        # for ind in range(len(obj._qCoupling__qSys)):
        #     if len(obj._qCoupling__cFncs) != len(obj._qCoupling__qSys):
        #         className = obj.__class__.__name__
        #         print(className + ' requires same number of systems as coupling functions')
    return newFunction


def sweepInitError(init):
    def newFunction(obj, **kwargs):
        init(obj, **kwargs)
        if obj.sweepList is None:
            className = obj.__class__.__name__
            print(className + ' requires either a list or relevant info, here are the givens'
                  + '\n' +  # noqa: W503, W504
                  'sweepList: ', obj.sweepList, '\n' +  # noqa: W504
                  'sweepMax: ', obj.sweepMax, '\n' +  # noqa: W504
                  'sweepMin: ', obj.sweepMin, '\n' +  # noqa: W504
                  'sweepPert: ', obj.sweepPert, '\n' +  # noqa: W504
                  'logSweep: ', obj.logSweep)
    return newFunction
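
# Editor's sketch of the TODO above (not in the original module): the same
# decorator shape, but raising instead of printing. The exception class name
# is illustrative only.
class QSystemInitError(ValueError):
    pass


def qSystemInitErrorsRaising(init):
    def newFunction(obj, **kwargs):
        init(obj, **kwargs)
        if obj._genericQSys__dimension is None:
            raise QSystemInitError(obj.__class__.__name__ + ' requires a dimension')
        if obj.frequency is None:
            raise QSystemInitError(obj.__class__.__name__ + ' requires a frequency')
    return newFunction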
| 40.617021
| 92
| 0.600838
| 200
| 1,909
| 5.435
| 0.35
| 0.066237
| 0.093836
| 0.115915
| 0.417663
| 0.417663
| 0.417663
| 0.378105
| 0.378105
| 0.333027
| 0
| 0.013413
| 0.297014
| 1,909
| 46
| 93
| 41.5
| 0.796572
| 0.223677
| 0
| 0.363636
| 0
| 0
| 0.149081
| 0
| 0
| 0
| 0
| 0.021739
| 0
| 1
| 0.181818
| false
| 0
| 0
| 0
| 0.272727
| 0.151515
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed1c74c77f9a61e232ea9a2a837cdc1274993efb
| 6,997
|
py
|
Python
|
reagent/gym/tests/test_gym.py
|
alexnikulkov/ReAgent
|
e404c5772ea4118105c2eb136ca96ad5ca8e01db
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/gym/tests/test_gym.py
|
alexnikulkov/ReAgent
|
e404c5772ea4118105c2eb136ca96ad5ca8e01db
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/gym/tests/test_gym.py
|
alexnikulkov/ReAgent
|
e404c5772ea4118105c2eb136ca96ad5ca8e01db
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.

import logging
import os
import pprint
import unittest

import numpy as np

# pyre-fixme[21]: Could not find module `pytest`.
import pytest
import torch
from parameterized import parameterized
from reagent.core.types import RewardOptions
from reagent.gym.agents.agent import Agent
from reagent.gym.agents.post_step import train_with_replay_buffer_post_step
from reagent.gym.envs.union import Env__Union
from reagent.gym.runners.gymrunner import evaluate_for_n_episodes, run_episode
from reagent.gym.utils import build_normalizer, fill_replay_buffer
from reagent.model_managers.model_manager import ModelManager
from reagent.model_managers.union import ModelManager__Union
from reagent.replay_memory.circular_replay_buffer import ReplayBuffer
from reagent.tensorboardX import summary_writer_context
from reagent.test.base.horizon_test_base import HorizonTestBase
from torch.utils.tensorboard import SummaryWriter

try:
    # Use internal runner or OSS otherwise
    from reagent.runners.fb.fb_batch_runner import FbBatchRunner as BatchRunner
except ImportError:
    from reagent.runners.oss_batch_runner import OssBatchRunner as BatchRunner

# for seeding the environment
SEED = 0
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

"""
Put on-policy gym tests here in the format (test name, path to yaml config).
Format path to be: "configs/<env_name>/<model_name>_<env_name>_online.yaml."
NOTE: These tests should ideally finish quickly (within 10 minutes) since they are
unit tests which are run many times.
"""
GYM_TESTS = [
    ("Discrete DQN Cartpole", "configs/cartpole/discrete_dqn_cartpole_online.yaml"),
    ("Discrete C51 Cartpole", "configs/cartpole/discrete_c51_cartpole_online.yaml"),
    ("Discrete QR Cartpole", "configs/cartpole/discrete_qr_cartpole_online.yaml"),
    (
        "Discrete DQN Open Gridworld",
        "configs/open_gridworld/discrete_dqn_open_gridworld.yaml",
    ),
    ("SAC Pendulum", "configs/pendulum/sac_pendulum_online.yaml"),
    ("TD3 Pendulum", "configs/pendulum/td3_pendulum_online.yaml"),
    ("Parametric DQN Cartpole", "configs/cartpole/parametric_dqn_cartpole_online.yaml"),
    (
        "Parametric SARSA Cartpole",
        "configs/cartpole/parametric_sarsa_cartpole_online.yaml",
    ),
    (
        "Sparse DQN Changing Arms",
        "configs/sparse/discrete_dqn_changing_arms_online.yaml",
    ),
    ("SlateQ RecSim", "configs/recsim/slate_q_recsim_online.yaml"),
    ("PossibleActionsMask DQN", "configs/functionality/dqn_possible_actions_mask.yaml"),
]

curr_dir = os.path.dirname(__file__)


class TestGym(HorizonTestBase):
    # pyre-fixme[16]: Module `parameterized` has no attribute `expand`.
    @parameterized.expand(GYM_TESTS)
    def test_gym_cpu(self, name: str, config_path: str):
        logger.info(f"Starting {name} on CPU")
        self.run_from_config(
            run_test=run_test,
            config_path=os.path.join(curr_dir, config_path),
            use_gpu=False,
        )
        logger.info(f"{name} passes!")

    # pyre-fixme[16]: Module `parameterized` has no attribute `expand`.
    @parameterized.expand(GYM_TESTS)
    @pytest.mark.serial
    # pyre-fixme[56]: Argument `not torch.cuda.is_available()` to decorator factory
    # `unittest.skipIf` could not be resolved in a global scope.
    @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available")
    def test_gym_gpu(self, name: str, config_path: str):
        logger.info(f"Starting {name} on GPU")
        self.run_from_config(
            run_test=run_test,
            config_path=os.path.join(curr_dir, config_path),
            use_gpu=True,
        )
        logger.info(f"{name} passes!")


def run_test(
    env: Env__Union,
    model: ModelManager__Union,
    replay_memory_size: int,
    train_every_ts: int,
    train_after_ts: int,
    num_train_episodes: int,
    passing_score_bar: float,
    num_eval_episodes: int,
    use_gpu: bool,
):
    env = env.value
    env.seed(SEED)
    env.action_space.seed(SEED)
    normalization = build_normalizer(env)
    logger.info(f"Normalization is: \n{pprint.pformat(normalization)}")

    manager: ModelManager = model.value
    runner = BatchRunner(use_gpu, manager, RewardOptions(), normalization)
    trainer = runner.initialize_trainer()
    reporter = manager.get_reporter()
    trainer.reporter = reporter

    training_policy = manager.create_policy(trainer)
    replay_buffer = ReplayBuffer(
        replay_capacity=replay_memory_size, batch_size=trainer.minibatch_size
    )

    device = torch.device("cuda") if use_gpu else torch.device("cpu")
    # first fill the replay buffer to burn_in
    train_after_ts = max(train_after_ts, trainer.minibatch_size)
    fill_replay_buffer(
        env=env, replay_buffer=replay_buffer, desired_size=train_after_ts
    )

    post_step = train_with_replay_buffer_post_step(
        replay_buffer=replay_buffer,
        env=env,
        trainer=trainer,
        training_freq=train_every_ts,
        batch_size=trainer.minibatch_size,
        device=device,
    )

    agent = Agent.create_for_env(
        env, policy=training_policy, post_transition_callback=post_step, device=device
    )

    writer = SummaryWriter()
    with summary_writer_context(writer):
        train_rewards = []
        for i in range(num_train_episodes):
            trajectory = run_episode(
                env=env, agent=agent, mdp_id=i, max_steps=env.max_steps
            )
            ep_reward = trajectory.calculate_cumulative_reward()
            train_rewards.append(ep_reward)
            logger.info(
                f"Finished training episode {i} (len {len(trajectory)})"
                f" with reward {ep_reward}."
            )

    logger.info("============Train rewards=============")
    logger.info(train_rewards)
    logger.info(f"average: {np.mean(train_rewards)};\tmax: {np.max(train_rewards)}")

    # Check whether the max score passed the score bar; we explore during training,
    # so the return could be bad (leading to flakiness in C51 and QRDQN).
    assert np.max(train_rewards) >= passing_score_bar, (
        f"max reward ({np.max(train_rewards)}) after training for "
        f"{len(train_rewards)} episodes is less than {passing_score_bar}.\n"
    )

    serving_policy = manager.create_serving_policy(normalization, trainer)
    agent = Agent.create_for_env_with_serving_policy(env, serving_policy)

    eval_rewards = evaluate_for_n_episodes(
        n=num_eval_episodes, env=env, agent=agent, max_steps=env.max_steps
    ).squeeze(1)
    logger.info("============Eval rewards==============")
    logger.info(eval_rewards)
    mean_eval = np.mean(eval_rewards)
    logger.info(f"average: {mean_eval};\tmax: {np.max(eval_rewards)}")
    assert (
        mean_eval >= passing_score_bar
    ), f"Eval reward is {mean_eval}, less than {passing_score_bar}.\n"


if __name__ == "__main__":
    unittest.main()
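
# Editor's note (illustrative, not part of the suite): following the
# "configs/<env_name>/<model_name>_<env_name>_online.yaml" convention
# documented above, a hypothetical new on-policy test would be registered by
# adding one tuple to GYM_TESTS, e.g.
#
#     ("Discrete DQN Acrobot", "configs/acrobot/discrete_dqn_acrobot_online.yaml"),
#
# parameterized.expand then derives both the CPU and the GPU test case from it.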
| 36.253886
| 88
| 0.711019
| 910
| 6,997
| 5.214286
| 0.286813
| 0.030137
| 0.018546
| 0.0196
| 0.175553
| 0.132771
| 0.084299
| 0.084299
| 0.084299
| 0.084299
| 0
| 0.003686
| 0.185651
| 6,997
| 192
| 89
| 36.442708
| 0.829063
| 0.093754
| 0
| 0.089041
| 0
| 0
| 0.226025
| 0.122685
| 0
| 0
| 0
| 0.010417
| 0.013699
| 1
| 0.020548
| false
| 0.047945
| 0.157534
| 0
| 0.184932
| 0.013699
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed1f38ec9a444c4d387d2b1c3bbd4a46cc3895ba
| 2,132
|
py
|
Python
|
mcpython/common/block/ISlab.py
|
mcpython4-coding/core
|
e4c4f59dab68c90e2028db3add2e5065116bf4a6
|
[
"CC0-1.0",
"MIT"
] | 2
|
2019-11-02T05:26:11.000Z
|
2019-11-03T08:52:18.000Z
|
mcpython/common/block/ISlab.py
|
mcpython4-coding/core
|
e4c4f59dab68c90e2028db3add2e5065116bf4a6
|
[
"CC0-1.0",
"MIT"
] | 25
|
2019-11-02T05:24:29.000Z
|
2022-02-09T14:09:08.000Z
|
mcpython/common/block/ISlab.py
|
mcpython4-coding/core
|
e4c4f59dab68c90e2028db3add2e5065116bf4a6
|
[
"CC0-1.0",
"MIT"
] | 5
|
2019-11-09T05:36:06.000Z
|
2021-11-28T13:07:08.000Z
|
"""
mcpython - a minecraft clone written in python licenced under the MIT-licence
(https://github.com/mcpython4-coding/core)
Contributors: uuk, xkcdjerry (inactive)
Based on the game of fogleman (https://github.com/fogleman/Minecraft), licenced under the MIT-licence
Original game "minecraft" by Mojang Studios (www.minecraft.net), licenced under the EULA
(https://account.mojang.com/documents/minecraft_eula)
Mod loader inspired by "Minecraft Forge" (https://github.com/MinecraftForge/MinecraftForge) and similar
This project is not official by mojang and does not relate to it.
"""
import mcpython.common.block.AbstractBlock
import mcpython.engine.physics.AxisAlignedBoundingBox
import mcpython.util.enums
from mcpython.util.enums import SlabModes
BBOX_DICT = {
SlabModes.TOP: mcpython.engine.physics.AxisAlignedBoundingBox.AxisAlignedBoundingBox(
(1, 0.5, 1), (0, 0.5, 0)
),
SlabModes.BOTTOM: mcpython.engine.physics.AxisAlignedBoundingBox.AxisAlignedBoundingBox(
(1, 0.5, 1)
),
SlabModes.DOUBLE: mcpython.engine.physics.AxisAlignedBoundingBox.FULL_BLOCK_BOUNDING_BOX,
}
class ISlab(mcpython.common.block.AbstractBlock.AbstractBlock):
"""
Base class for slabs
"""
IS_SOLID = False
DEFAULT_FACE_SOLID = 0
def __init__(self):
super().__init__()
self.type = SlabModes.TOP
async def on_block_added(self):
if self.real_hit and self.real_hit[1] - self.position[1] > 0:
self.type = SlabModes.TOP
else:
self.type = SlabModes.BOTTOM
await self.schedule_network_update()
def get_model_state(self):
return {"type": self.type.name.lower()}
def set_model_state(self, state: dict):
if "type" in state:
self.type = SlabModes[state["type"].upper()]
DEBUG_WORLD_BLOCK_STATES = [{"type": x.name.upper()} for x in SlabModes]
async def on_player_interact(
self, player, itemstack, button, modifiers, exact_hit
) -> bool:
# todo: add half -> double convert
return False
def get_view_bbox(self):
return BBOX_DICT[self.type]
| 32.30303
| 103
| 0.701689
| 272
| 2,132
| 5.371324
| 0.452206
| 0.032854
| 0.057495
| 0.117728
| 0.130048
| 0.094456
| 0.094456
| 0.094456
| 0.094456
| 0
| 0
| 0.009878
| 0.192777
| 2,132
| 65
| 104
| 32.8
| 0.839047
| 0.297373
| 0
| 0.108108
| 0
| 0
| 0.010847
| 0
| 0
| 0
| 0
| 0.015385
| 0
| 1
| 0.108108
| false
| 0
| 0.108108
| 0.054054
| 0.405405
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed207a7611696af8395d372e4e8d01f42d7c6467
| 25,419
|
py
|
Python
|
CourseOutlineBackend/courseoutline/serializers.py
|
stancsz/web-development-project-ensf-607
|
03b11df4971afd4f27fee54a1800a40d4cc10240
|
[
"Apache-2.0"
] | null | null | null |
CourseOutlineBackend/courseoutline/serializers.py
|
stancsz/web-development-project-ensf-607
|
03b11df4971afd4f27fee54a1800a40d4cc10240
|
[
"Apache-2.0"
] | null | null | null |
CourseOutlineBackend/courseoutline/serializers.py
|
stancsz/web-development-project-ensf-607
|
03b11df4971afd4f27fee54a1800a40d4cc10240
|
[
"Apache-2.0"
] | null | null | null |
from rest_framework import serializers
from .models import *
class CoordinatorSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
FName = serializers.CharField(max_length=100, required=False)
LName = serializers.CharField(max_length=100, required=False)
Phone = serializers.CharField(max_length=100, required=False)
Office = serializers.CharField(max_length=100, required=False)
Email = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
# Once the request data has been validated, we can create a Coordinator instance in the database
return Coordinator.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
FName=validated_data.get('FName'),
LName=validated_data.get('LName'),
Phone=validated_data.get('Phone'),
Office=validated_data.get('Office'),
Email=validated_data.get('Email')
)
def update(self, instance, validated_data):
# Once the request data has been validated, we can update the Coordinator instance in the database
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.FName = validated_data.get('FName', instance.FName)
instance.LName = validated_data.get('LName', instance.LName)
instance.Phone = validated_data.get('Phone', instance.Phone)
instance.Office = validated_data.get('Office', instance.Office)
instance.Email = validated_data.get('Email', instance.Email)
instance.save()
return instance
class Meta:
model = Coordinator
fields = (
'ModelID',
'CourseID',
'FName',
'LName',
'Phone',
'Office',
'Email'
)
class InfoSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
GradeNotes = serializers.CharField(max_length=5000, required=False)
Examination = serializers.CharField(max_length=5000, required=False)
CourseDescription = serializers.CharField(max_length=5000, required=False)
UseCalc = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return Info.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
GradeNotes=validated_data.get('GradeNotes'),
Examination=validated_data.get('Examination'),
CourseDescription=validated_data.get('CourseDescription'),
UseCalc=validated_data.get('UseCalc')
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.GradeNotes = validated_data.get('GradeNotes', instance.GradeNotes)
instance.Examination = validated_data.get('Examination', instance.Examination)
instance.CourseDescription = validated_data.get('CourseDescription', instance.CourseDescription)
instance.UseCalc = validated_data.get('UseCalc', instance.UseCalc)
instance.save()
return instance
class Meta:
model = Info
fields = (
'ModelID',
'CourseID',
'GradeNotes',
'Examination',
'CourseDescription',
'UseCalc'
)
class GradeDeterminationSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
Component = serializers.CharField(max_length=100, required=False)
OutcomeEvaluated = serializers.CharField(max_length=100, required=False)
Weight = serializers.IntegerField(required=False)
def create(self, validated_data):
# Once the request data has been validated, we can create a GradeDetermination instance in the database
return GradeDetermination.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
Component=validated_data.get('Component'),
OutcomeEvaluated=validated_data.get('OutcomeEvaluated'),
Weight=validated_data.get('Weight'),
)
def update(self, instance, validated_data):
# Once the request data has been validated, we can update the GradeDetermination instance in the database
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.Component = validated_data.get('Component', instance.Component)
instance.OutcomeEvaluated = validated_data.get('OutcomeEvaluated', instance.OutcomeEvaluated)
instance.Weight = validated_data.get('Weight', instance.Weight)
instance.save()
return instance
class Meta:
model = GradeDetermination
fields = (
'ModelID',
'CourseID',
'Component',
'OutcomeEvaluated',
'Weight'
)
class OutcomeSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
OutcomeNum = serializers.IntegerField(required=False) # removed max_length=100
Description = serializers.CharField(max_length=500, required=False) # Changed max_length to 500
GraduateAttribute = serializers.CharField(max_length=100, required=False)
InstructionLvl = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return Outcome.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
OutcomeNum=validated_data.get('OutcomeNum'),
Description=validated_data.get('Description'),
GraduateAttribute=validated_data.get('GraduateAttribute'),
InstructionLvl=validated_data.get('InstructionLvl'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.OutcomeNum = validated_data.get('OutcomeNum', instance.OutcomeNum)
instance.Description = validated_data.get('Description', instance.Description)
instance.GraduateAttribute = validated_data.get('GraduateAttribute', instance.GraduateAttribute)
instance.InstructionLvl = validated_data.get('InstructionLvl', instance.InstructionLvl)
instance.save()
return instance
class Meta:
model = Outcome
fields = (
'ModelID',
'CourseID',
'OutcomeNum',
'Description',
'GraduateAttribute',
'InstructionLvl'
)
class TimetableSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
SectionNum = serializers.CharField(max_length=100, required=False)
Days = serializers.CharField(max_length=100, required=False)
Time = serializers.CharField(max_length=100, required=False)
Location = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return Timetable.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
SectionNum=validated_data.get('SectionNum'),
Days=validated_data.get('Days'),
Time=validated_data.get('Time'),
Location=validated_data.get('Location'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.SectionNum = validated_data.get('SectionNum', instance.SectionNum)
instance.Days = validated_data.get('Days', instance.Days)
instance.Time = validated_data.get('Time', instance.Time)
instance.Location = validated_data.get('Location', instance.Location)
instance.save()
return instance
class Meta:
model = Timetable
fields = (
'ModelID',
'CourseID',
'SectionNum',
'Days',
'Time',
'Location'
)
class GradeDistributionSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
LowerLimit = serializers.IntegerField(required=False) # removed max_length = 100
UpperLimit = serializers.IntegerField(required=False) # removed max_length = 100
LetterGrade = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return GradeDistribution.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
LowerLimit=validated_data.get('LowerLimit'),
UpperLimit=validated_data.get('UpperLimit'),
LetterGrade=validated_data.get('LetterGrade'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.LowerLimit = validated_data.get('LowerLimit', instance.LowerLimit)
instance.UpperLimit = validated_data.get('UpperLimit', instance.UpperLimit)
instance.LetterGrade = validated_data.get('LetterGrade', instance.LetterGrade)
instance.save()
return instance
class Meta:
model = GradeDistribution
fields = (
'ModelID',
'CourseID',
'LowerLimit',
'UpperLimit',
'LetterGrade'
)
class LectureSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
LectureNum = serializers.CharField(max_length=100, required=False)
FName = serializers.CharField(max_length=100, required=False)
LName = serializers.CharField(max_length=100, required=False)
Phone = serializers.CharField(max_length=100, required=False)
Office = serializers.CharField(max_length=100, required=False)
Email = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return Lecture.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
LectureNum=validated_data.get('LectureNum'),
FName=validated_data.get('FName'),
LName=validated_data.get('LName'),
Phone=validated_data.get('Phone'),
Office=validated_data.get('Office'),
Email=validated_data.get('Email'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.LectureNum = validated_data.get('LectureNum', instance.LectureNum)
instance.FName = validated_data.get('FName', instance.FName)
instance.LName = validated_data.get('LName', instance.LName)
instance.Phone = validated_data.get('Phone', instance.Phone)
instance.Office = validated_data.get('Office', instance.Office)
instance.Email = validated_data.get('Email', instance.Email)
instance.save()
return instance
class Meta:
model = Lecture
fields = (
'ModelID',
'CourseID',
'LectureNum',
'FName',
'LName',
'Phone',
'Office',
'Email'
)
class TutorialSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
TutorialNum = serializers.CharField(max_length=100, required=False) # Changed Tutorial Num to CharField
FName = serializers.CharField(max_length=100, required=False) # Changed FName to CharField
LName = serializers.CharField(max_length=100, required=False)
Phone = serializers.CharField(max_length=100, required=False)
Office = serializers.CharField(max_length=100, required=False)
Email = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return Tutorial.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
TutorialNum=validated_data.get('TutorialNum'),
FName=validated_data.get('FName'),
LName=validated_data.get('LName'),
Phone=validated_data.get('Phone'),
Office=validated_data.get('Office'),
Email=validated_data.get('Email'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.TutorialNum = validated_data.get('TutorialNum', instance.TutorialNum)
instance.FName = validated_data.get('FName', instance.FName)
instance.LName = validated_data.get('LName', instance.LName)
instance.Phone = validated_data.get('Phone', instance.Phone)
instance.Office = validated_data.get('Office', instance.Office)
instance.Email = validated_data.get('Email', instance.Email)
instance.save()
return instance
class Meta:
model = Tutorial
fields = (
'ModelID',
'CourseID',
'TutorialNum',
'FName',
'LName',
'Phone',
'Office',
'Email'
)
class CourseSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
CourseHours = serializers.CharField(max_length=100, required=False) # Changed CourseHours to CharField
CourseName = serializers.CharField(max_length=100, required=False) # Changed CourseName to CharField
CalenderRefrence = serializers.CharField(max_length=100, required=False)
AcademicCredit = serializers.IntegerField(required=False) # Changed AcademicCredit to IntegerField
DateCreated = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return Course.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
CourseHours=validated_data.get('CourseHours'),
CourseName=validated_data.get('CourseName'),
CalenderRefrence=validated_data.get('CalenderRefrence'),
AcademicCredit=validated_data.get('AcademicCredit'),
DateCreated=validated_data.get('DateCreated'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.CourseHours = validated_data.get('CourseHours', instance.CourseHours)
instance.CourseName = validated_data.get('CourseName', instance.CourseName)
instance.CalenderRefrence = validated_data.get('CalenderRefrence', instance.CalenderRefrence)
instance.AcademicCredit = validated_data.get('AcademicCredit', instance.AcademicCredit)
instance.DateCreated = validated_data.get('DateCreated', instance.DateCreated)
instance.save()
return instance
class Meta:
model = Course
fields = (
'ModelID',
'CourseID',
'CourseHours',
'CourseName',
'CalenderRefrence',
'AcademicCredit',
'DateCreated'
)
class TextbookSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
TITLE = serializers.CharField(max_length=100, required=False)
Publisher = serializers.CharField(max_length=100, required=False)
Author = serializers.CharField(max_length=100, required=False)
Edition = serializers.CharField(max_length=100, required=False)
type = serializers.CharField(max_length=100, required=False)
def create(self, validated_data):
return Textbook.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
TITLE=validated_data.get('TITLE'),
Publisher=validated_data.get('Publisher'),
Author=validated_data.get('Author'),
Edition=validated_data.get('Edition'),
type=validated_data.get('type'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.TITLE = validated_data.get('TITLE', instance.TITLE)
instance.Publisher = validated_data.get('Publisher', instance.Publisher)
instance.Author = validated_data.get('Author', instance.Author)
instance.Edition = validated_data.get('Edition', instance.Edition)
instance.type = validated_data.get('type', instance.type)
instance.save()
return instance
class Meta:
model = Textbook
fields = (
'ModelID',
'CourseID',
'TITLE',
'Publisher',
'Author',
'Edition',
'type'
)
class AuWeightSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
Category = serializers.CharField(max_length=100, required=True)
AU = serializers.IntegerField(required=False)
def create(self, validated_data):
return AuWeight.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
Category=validated_data.get('Category'),
AU=validated_data.get('AU'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.Category = validated_data.get('Category', instance.Category)
instance.AU = validated_data.get('AU', instance.AU)
instance.save()
return instance
class Meta:
model = AuWeight
fields = (
'ModelID',
'CourseID',
'Category',
'AU'
)
class ContentCategorySerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
CategoryType = serializers.CharField(max_length=100, required=True)
Element = serializers.CharField(max_length=100, required=True)
def create(self, validated_data):
return ContentCategory.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
CategoryType=validated_data.get('CategoryType'),
Element=validated_data.get('Element'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.CategoryType = validated_data.get('CategoryType', instance.CategoryType)
instance.Element = validated_data.get('Element', instance.Element)
instance.save()
return instance
class Meta:
model = ContentCategory
fields = (
'ModelID',
'CourseID',
'CategoryType',
'Element'
)
class LabSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
LabNum = serializers.CharField(max_length=100, required=True)
NumberOfLabs = serializers.IntegerField(required=False)
LabType = serializers.CharField(max_length=100, required=True)
SafetyExamined = serializers.CharField(max_length=100, required=True)
SafetyTaught = serializers.CharField(max_length=100, required=True)
FName = serializers.CharField(max_length=100, required=True)
LName = serializers.CharField(max_length=100, required=True)
Phone = serializers.CharField(max_length=100, required=True)
Office = serializers.CharField(max_length=100, required=True)
Email = serializers.CharField(max_length=100, required=True)
def create(self, validated_data):
return Lab.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
LabNum=validated_data.get('LabNum'),
NumberOfLabs=validated_data.get('NumberOfLabs'),
LabType=validated_data.get('LabType'),
SafetyExamined=validated_data.get('SafetyExamined'),
SafetyTaught=validated_data.get('SafetyTaught'),
FName=validated_data.get('FName'),
LName=validated_data.get('LName'),
Phone=validated_data.get('Phone'),
Office=validated_data.get('Office'),
Email=validated_data.get('Email'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.LabNum = validated_data.get('LabNum', instance.LabNum)
instance.NumberOfLabs = validated_data.get('NumberOfLabs', instance.NumberOfLabs)
instance.LabType = validated_data.get('LabType', instance.LabType)
instance.SafetyExamined = validated_data.get('SafetyExamined', instance.SafetyExamined)
instance.SafetyTaught = validated_data.get('SafetyTaught', instance.SafetyTaught)
instance.FName = validated_data.get('FName', instance.FName)
instance.LName = validated_data.get('LName', instance.LName)
instance.Phone = validated_data.get('Phone', instance.Phone)
instance.Office = validated_data.get('Office', instance.Office)
instance.Email = validated_data.get('Email', instance.Email)
instance.save()
return instance
class Meta:
model = Lab
fields = (
'ModelID',
'CourseID',
'LabNum',
'NumberOfLabs',
'LabType',
'SafetyExamined',
'SafetyTaught',
'FName',
'LName',
'Phone',
'Office',
'Email'
)
class SectionSerializer(serializers.ModelSerializer):
# ModelID = serializers.CharField(max_length=100, required=True)
CourseID = serializers.CharField(max_length=100, required=True)
SectionNumber = serializers.CharField(max_length=100, required=False)
Students = serializers.IntegerField(required=False)
Hours = serializers.IntegerField(required=False)
type = serializers.CharField(max_length=100, required=True)
def create(self, validated_data):
return Section.objects.create(
ModelID=validated_data.get('ModelID'),
CourseID=validated_data.get('CourseID'),
SectionNumber=validated_data.get('SectionNumber'),
Students=validated_data.get('Students'),
Hours=validated_data.get('Hours'),
type=validated_data.get('type'),
)
def update(self, instance, validated_data):
instance.ModelID = validated_data.get('ModelID', instance.ModelID)
instance.CourseID = validated_data.get('CourseID', instance.CourseID)
instance.SectionNumber = validated_data.get('SectionNumber', instance.SectionNumber)
instance.Students = validated_data.get('Students', instance.Students)
instance.Hours = validated_data.get('Hours', instance.Hours)
instance.type = validated_data.get('type', instance.type)
instance.save()
return instance
class Meta:
model = Section
fields = (
'ModelID',
'CourseID',
'SectionNumber',
'Students',
'Hours',
'type'
)
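# --- Editor's illustrative sketch (not part of the original file) ---
# A minimal example of driving SectionSerializer above, assuming a configured
# Django project and Section model; the payload values are hypothetical.
#
# data = {'ModelID': 'M-001', 'CourseID': 'CS101', 'SectionNumber': 'A1',
#         'Students': 30, 'Hours': 3, 'type': 'lecture'}
# serializer = SectionSerializer(data=data)
# if serializer.is_valid():
#     section = serializer.save()   # dispatches to create() above
# else:
#     print(serializer.errors)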
| 42.649329
| 108
| 0.66824
| 2,481
| 25,419
| 6.726723
| 0.050383
| 0.163581
| 0.174486
| 0.142489
| 0.8206
| 0.645875
| 0.638444
| 0.554137
| 0.531128
| 0.523998
| 0
| 0.013246
| 0.224832
| 25,419
| 595
| 109
| 42.721008
| 0.833739
| 0.059837
| 0
| 0.517787
| 0
| 0
| 0.090867
| 0
| 0
| 0
| 0
| 0.001681
| 0
| 1
| 0.055336
| false
| 0
| 0.003953
| 0.027668
| 0.322134
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed22f71576a11a3b9302f73902c8de9c8f96d4dd
| 1,244
|
py
|
Python
|
frontends/pytorch/python/torch_mlir_torchscript_e2e_test_configs/torchscript.py
|
raikonenfnu/mlir-npcomp
|
29e1b2fe89848d58c9bc07e7df7ce651850a5244
|
[
"Apache-2.0"
] | null | null | null |
frontends/pytorch/python/torch_mlir_torchscript_e2e_test_configs/torchscript.py
|
raikonenfnu/mlir-npcomp
|
29e1b2fe89848d58c9bc07e7df7ce651850a5244
|
[
"Apache-2.0"
] | null | null | null |
frontends/pytorch/python/torch_mlir_torchscript_e2e_test_configs/torchscript.py
|
raikonenfnu/mlir-npcomp
|
29e1b2fe89848d58c9bc07e7df7ce651850a5244
|
[
"Apache-2.0"
] | null | null | null |
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
import copy
from typing import Any
import torch
from torch_mlir_torchscript.e2e_test.framework import TestConfig, Trace, TraceItem
class TorchScriptTestConfig(TestConfig):
"""TestConfig that runs the torch.nn.Module through TorchScript"""
def __init__(self):
super().__init__()
def compile(self, program: torch.nn.Module) -> torch.jit.ScriptModule:
return torch.jit.script(program)
def run(self, artifact: torch.jit.ScriptModule, trace: Trace) -> Trace:
# TODO: Deepcopy the torch.jit.ScriptModule, so that if the program is
# stateful then it does not mutate the original compiled program.
result: Trace = []
for item in trace:
attr = artifact
for part in item.symbol.split('.'):
attr = getattr(attr, part)
output = attr(*item.inputs)
result.append(
TraceItem(symbol=item.symbol,
inputs=item.inputs,
output=output))
return result
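# --- Editor's illustrative sketch (not part of the original file) ---
# How TorchScriptTestConfig above might be exercised; the module, symbol and
# inputs are hypothetical, and TraceItem's fields follow the usage above.
#
# class AddOne(torch.nn.Module):
#     def forward(self, x):
#         return x + 1
#
# config = TorchScriptTestConfig()
# compiled = config.compile(AddOne())
# trace = [TraceItem(symbol='forward', inputs=[torch.tensor(1.0)], output=None)]
# for item in config.run(compiled, trace):
#     print(item.symbol, item.output)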
| 34.555556
| 82
| 0.644695
| 150
| 1,244
| 5.273333
| 0.52
| 0.040455
| 0.075853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005482
| 0.266881
| 1,244
| 35
| 83
| 35.542857
| 0.861842
| 0.312701
| 0
| 0
| 0
| 0
| 0.001183
| 0
| 0
| 0
| 0
| 0.028571
| 0
| 1
| 0.142857
| false
| 0
| 0.190476
| 0.047619
| 0.47619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed23166702dcea8d3e5e73f8ed58f0971f2a45b0
| 2,495
|
py
|
Python
|
app/balltracking/pubnubpython/pnconfiguration.py
|
gdmgent-1718-wot/interactive-wall
|
af7ecff126b1ee9c85c270fe13d1338aa790c34b
|
[
"Apache-2.0"
] | null | null | null |
app/balltracking/pubnubpython/pnconfiguration.py
|
gdmgent-1718-wot/interactive-wall
|
af7ecff126b1ee9c85c270fe13d1338aa790c34b
|
[
"Apache-2.0"
] | null | null | null |
app/balltracking/pubnubpython/pnconfiguration.py
|
gdmgent-1718-wot/interactive-wall
|
af7ecff126b1ee9c85c270fe13d1338aa790c34b
|
[
"Apache-2.0"
] | null | null | null |
from .enums import PNHeartbeatNotificationOptions, PNReconnectionPolicy
from . import utils
class PNConfiguration(object):
DEFAULT_PRESENCE_TIMEOUT = 300
DEFAULT_HEARTBEAT_INTERVAL = 280
def __init__(self):
# TODO: add validation
self.uuid = None
self.origin = "ps.pndsn.com"
self.ssl = False
self.non_subscribe_request_timeout = 10
self.subscribe_request_timeout = 310
self.connect_timeout = 5
self.subscribe_key = None
self.publish_key = None
self.secret_key = None
self.cipher_key = None
self.auth_key = None
self.filter_expression = None
self.enable_subscribe = True
self.crypto_instance = None
self.log_verbosity = False
self.heartbeat_notification_options = PNHeartbeatNotificationOptions.FAILURES
self.reconnect_policy = PNReconnectionPolicy.NONE
self.daemon = False
self.heartbeat_default_values = True
self._presence_timeout = PNConfiguration.DEFAULT_PRESENCE_TIMEOUT
self._heartbeat_interval = PNConfiguration.DEFAULT_HEARTBEAT_INTERVAL
def validate(self):
assert self.uuid is None or isinstance(self.uuid, str)
if self.uuid is None:
self.uuid = utils.uuid()
def scheme(self):
if self.ssl:
return "https"
else:
return "http"
def scheme_extended(self):
return self.scheme() + "://"
def scheme_and_host(self):
return self.scheme_extended() + self.origin
def set_presence_timeout_with_custom_interval(self, timeout, interval):
self.heartbeat_default_values = False
self._presence_timeout = timeout
self._heartbeat_interval = interval
def set_presence_timeout(self, timeout):
self.set_presence_timeout_with_custom_interval(timeout, (timeout / 2) - 1)
@property
def crypto(self):
if self.crypto_instance is None:
self._init_cryptodome()
return self.crypto_instance
def _init_cryptodome(self):
from .crypto import PubNubCryptodome
self.crypto_instance = PubNubCryptodome()
@property
def port(self):
        return 443 if self.ssl else 80  # ssl is a boolean; the old comparison to "https" always yielded port 80
@property
def presence_timeout(self):
return self._presence_timeout
@property
def heartbeat_interval(self):
return self._heartbeat_interval
    # TODO: set log level
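# --- Editor's illustrative sketch (not part of the original file) ---
# Typical configuration flow for the class above; the keys are placeholders.
#
# pnconfig = PNConfiguration()
# pnconfig.subscribe_key = 'sub-c-xxxx'   # hypothetical key
# pnconfig.publish_key = 'pub-c-xxxx'     # hypothetical key
# pnconfig.ssl = True
# pnconfig.set_presence_timeout(120)      # heartbeat interval becomes (120 / 2) - 1 = 59
# pnconfig.validate()                     # generates a uuid if none was set
# print(pnconfig.scheme_and_host())       # -> 'https://ps.pndsn.com'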
| 29.352941
| 85
| 0.666132
| 278
| 2,495
| 5.723022
| 0.302158
| 0.055311
| 0.034569
| 0.032684
| 0.045255
| 0.045255
| 0
| 0
| 0
| 0
| 0
| 0.01036
| 0.26493
| 2,495
| 84
| 86
| 29.702381
| 0.857143
| 0.024048
| 0
| 0.063492
| 0
| 0
| 0.011929
| 0
| 0
| 0
| 0
| 0.011905
| 0.015873
| 1
| 0.190476
| false
| 0
| 0.047619
| 0.079365
| 0.412698
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed24828337abdac65179c3d1fc89a55415ddc15a
| 1,871
|
py
|
Python
|
language/Basics/stringformatting.py
|
Binary-bug/Python
|
233425ded6abc26c889599a82a181487789e3bab
|
[
"MIT"
] | null | null | null |
language/Basics/stringformatting.py
|
Binary-bug/Python
|
233425ded6abc26c889599a82a181487789e3bab
|
[
"MIT"
] | null | null | null |
language/Basics/stringformatting.py
|
Binary-bug/Python
|
233425ded6abc26c889599a82a181487789e3bab
|
[
"MIT"
] | null | null | null |
age = 24
print("My age is " + str(age) + " years ")
# the above procedure is tedious since we don't really want to wrap every number we encounter in str()
#Method1 Replacement Fields
print("My age is {0} years ".format(age)) # {0} is the actual replacement field, number important for multiple replacement fields
print("There are {0} days in {1}, {2}, {3}, {4}, {5}, {6} and {7} ".format(31,"January","March","May","july","August","october","december"))
#each of the arguments of .format is matched to its respective replacement field
print("""January:{2}
February:{0}
March:{2}
April:{1}
""".format(28,30,31))
#Method2 The % formatting operator - legacy style from Python 2, not recommended
print("My age is %d years" % age)
print("My age is %d %s, %d %s" % (age,"years",6,"months"))
#^ old format and it was elegant -__-
#
# for i in range(1,12):
# print("No, %2d squared is %4d and cubed is %4d" %(i,i**2,i**3)) # ** operator raises power %xd x allocates spaces
#
# #for comparison
# print()
# for i in range(1,12):
# print("No, %d squared is %d and cubed is %d" % (i,i**2,i**3))
#
#
# #adding more precision
#
# print("Pi is approximately %12.50f" % (22/7)) # 50 decimal precsion and 12 for spaces default is 6 spaces
#
#Replacement field syntax variant of above Python 2 tricks
for i in range(1,12):
print("No. {0:2} squared is {1:4} and cubed is {2:4}".format(i,i**2,i**3))
print()
#for left alignment
for i in range(1,12):
print("NO. {0:<2} squared is {1:<4} and cubed is {2:<4}".format(i,i**2,i**3))
#floating point precision
print("Pi is approximately {0:.50}".format(22/7))
#use of numbers in replacement fields is optional when the default order is implied
for i in range(1,12):
print("No. {:2} squared is {:4} and cubed is {:4}".format(i,i**2,i**3))
days = "Mon, Tue, Wed, Thu, Fri, Sat, Sun"
print(days[::5])
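# Editor's addition: the modern equivalent of the replacement-field examples
# above uses f-strings (Python 3.6+); the same format specs apply.
for i in range(1,12):
    print(f"No. {i:2} squared is {i**2:4} and cubed is {i**3:4}")
print(f"Pi is approximately {22/7:.50}")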
| 25.986111
| 140
| 0.649385
| 330
| 1,871
| 3.675758
| 0.372727
| 0.016488
| 0.024732
| 0.045342
| 0.236603
| 0.155812
| 0.155812
| 0.145919
| 0.093982
| 0.093982
| 0
| 0.0589
| 0.174238
| 1,871
| 72
| 141
| 25.986111
| 0.726214
| 0.513095
| 0
| 0.142857
| 0
| 0.142857
| 0.486887
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.571429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
ed25ac3871761ab8e7fb05fe5b59a6a001de70b4
| 154
|
py
|
Python
|
Euler0001.py
|
rbarillec/project_euler
|
db812f9ae53090b34716452d0cb9ec14bf218290
|
[
"MIT"
] | null | null | null |
Euler0001.py
|
rbarillec/project_euler
|
db812f9ae53090b34716452d0cb9ec14bf218290
|
[
"MIT"
] | null | null | null |
Euler0001.py
|
rbarillec/project_euler
|
db812f9ae53090b34716452d0cb9ec14bf218290
|
[
"MIT"
] | null | null | null |
def Euler0001():
    limit = 1000  # renamed from max/sum to avoid shadowing the built-ins
    total = 0
    for i in range(1, limit):
        if i % 3 == 0 or i % 5 == 0:
            total += i
    print(total)
Euler0001()
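# Editor's addition: an O(1) sketch of the same answer, using the arithmetic
# series identity n + 2n + ... + kn = n * k * (k + 1) / 2 and
# inclusion-exclusion over multiples of 3, 5 and 15.
def sum_of_multiples(n, below):
    k = (below - 1) // n            # number of multiples of n strictly below the limit
    return n * k * (k + 1) // 2

print(sum_of_multiples(3, 1000) + sum_of_multiples(5, 1000) - sum_of_multiples(15, 1000))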
| 15.4
| 32
| 0.448052
| 25
| 154
| 2.76
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197802
| 0.409091
| 154
| 10
| 33
| 15.4
| 0.56044
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.125
| 0.125
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed3b6f60e4e30cf75b95e63f68e2b18f1cb5a0e8
| 1,122
|
py
|
Python
|
templates/integration/__init__.py
|
p7g/dd-trace-py
|
141ac0ab6e9962e3b3bafc9de172076075289a19
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 308
|
2016-12-07T16:49:27.000Z
|
2022-03-15T10:06:45.000Z
|
templates/integration/__init__.py
|
p7g/dd-trace-py
|
141ac0ab6e9962e3b3bafc9de172076075289a19
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 1,928
|
2016-11-28T17:13:18.000Z
|
2022-03-31T21:43:19.000Z
|
templates/integration/__init__.py
|
p7g/dd-trace-py
|
141ac0ab6e9962e3b3bafc9de172076075289a19
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 311
|
2016-11-27T03:01:49.000Z
|
2022-03-18T21:34:03.000Z
|
"""
The foo integration instruments the bar and baz features of the
foo library.
Enabling
~~~~~~~~
The foo integration is enabled automatically when using
:ref:`ddtrace-run <ddtracerun>` or :ref:`patch_all() <patch_all>`.
Or use :ref:`patch() <patch>` to manually enable the integration::
from ddtrace import patch
patch(foo=True)
Global Configuration
~~~~~~~~~~~~~~~~~~~~
.. py:data:: ddtrace.config.foo["service"]
The service name reported by default for foo instances.
This option can also be set with the ``DD_FOO_SERVICE`` environment
variable.
Default: ``"foo"``
Instance Configuration
~~~~~~~~~~~~~~~~~~~~~~
To configure the foo integration on a per-instance basis use the
``Pin`` API::
import foo
from ddtrace import Pin
myfoo = foo.Foo()
Pin.override(myfoo, service="myfoo")
"""
from ...internal.utils.importlib import require_modules
required_modules = ["foo"]
with require_modules(required_modules) as missing_modules:
if not missing_modules:
from .patch import patch
from .patch import unpatch
__all__ = ["patch", "unpatch"]
| 20.777778
| 70
| 0.678253
| 144
| 1,122
| 5.1875
| 0.506944
| 0.032129
| 0.068273
| 0.077644
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186275
| 1,122
| 53
| 71
| 21.169811
| 0.818182
| 0.741533
| 0
| 0
| 0
| 0
| 0.053191
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
ed4086f481b4822d573ede5f8a9108ee4da236b6
| 290
|
py
|
Python
|
coding202-parsing-json/get-ap-json-1.py
|
firodj/ciscodevnet-coding-skills-sample-code
|
4fca975e450cf0c913001fe1b36582f7a094b1e7
|
[
"Apache-2.0"
] | null | null | null |
coding202-parsing-json/get-ap-json-1.py
|
firodj/ciscodevnet-coding-skills-sample-code
|
4fca975e450cf0c913001fe1b36582f7a094b1e7
|
[
"Apache-2.0"
] | null | null | null |
coding202-parsing-json/get-ap-json-1.py
|
firodj/ciscodevnet-coding-skills-sample-code
|
4fca975e450cf0c913001fe1b36582f7a094b1e7
|
[
"Apache-2.0"
] | null | null | null |
import requests
url = 'https://64.103.26.61/api/contextaware/v1/maps/info/DevNetCampus/DevNetBuilding/DevNetZone'
headers = {'Authorization': 'Basic bGVhcm5pbmc6bGVhcm5pbmc='}  # fixed base64 padding: a trailing "==" made the token invalid
response = requests.get(url, headers=headers, verify=False)
responseString = response.text
print(responseString)
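# --- Editor's illustrative sketch (not part of the original file) ---
# The endpoint above returns JSON, so the text can be decoded into a dict;
# the keys one would inspect are hypothetical.
#
# import json
# data = json.loads(responseString)
# print(list(data.keys()))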
| 41.428571
| 97
| 0.8
| 33
| 290
| 7.030303
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04797
| 0.065517
| 290
| 6
| 98
| 48.333333
| 0.808118
| 0
| 0
| 0
| 0
| 0.166667
| 0.458621
| 0.086207
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed4193bd5735a5283b0caa027d333560a4c2793c
| 1,078
|
py
|
Python
|
lldb/test/API/lang/swift/optimized_code/bound_generic_enum/TestSwiftOptimizedBoundGenericEnum.py
|
LaudateCorpus1/llvm-project
|
ff2e0f0c1112558b3f30d8afec7c9882c33c79e3
|
[
"Apache-2.0"
] | 605
|
2019-10-18T01:15:54.000Z
|
2022-03-31T14:31:04.000Z
|
lldb/test/API/lang/swift/optimized_code/bound_generic_enum/TestSwiftOptimizedBoundGenericEnum.py
|
LaudateCorpus1/llvm-project
|
ff2e0f0c1112558b3f30d8afec7c9882c33c79e3
|
[
"Apache-2.0"
] | 3,180
|
2019-10-18T01:21:21.000Z
|
2022-03-31T23:25:41.000Z
|
lldb/test/API/lang/swift/optimized_code/bound_generic_enum/TestSwiftOptimizedBoundGenericEnum.py
|
LaudateCorpus1/llvm-project
|
ff2e0f0c1112558b3f30d8afec7c9882c33c79e3
|
[
"Apache-2.0"
] | 275
|
2019-10-18T05:27:22.000Z
|
2022-03-30T09:04:21.000Z
|
import lldb
from lldbsuite.test.decorators import *
import lldbsuite.test.lldbtest as lldbtest
import lldbsuite.test.lldbutil as lldbutil
import os
import unittest2
class TestSwiftOptimizedBoundGenericEnum(lldbtest.TestBase):
mydir = lldbtest.TestBase.compute_mydir(__file__)
@swiftTest
def test(self):
"""Test the bound generic enum types in "optimized" code."""
self.build()
target, process, thread, bkpt = lldbutil.run_to_source_breakpoint(self,
'break one', lldb.SBFileSpec('main.swift'))
bkpt_two = target.BreakpointCreateBySourceRegex(
'break two', lldb.SBFileSpec('main.swift'))
self.assertGreater(bkpt_two.GetNumLocations(), 0)
var_self = self.frame().FindVariable("self")
# FIXME, this fails with a data extractor error.
lldbutil.check_variable(self, var_self, False, value=None)
lldbutil.continue_to_breakpoint(process, bkpt_two)
var_self = self.frame().FindVariable("self")
lldbutil.check_variable(self, var_self, True, value="success")
| 35.933333
| 79
| 0.705009
| 125
| 1,078
| 5.928
| 0.528
| 0.037787
| 0.051282
| 0.062078
| 0.17274
| 0.17274
| 0
| 0
| 0
| 0
| 0
| 0.002299
| 0.19295
| 1,078
| 29
| 80
| 37.172414
| 0.849425
| 0.09462
| 0
| 0.095238
| 0
| 0
| 0.054639
| 0
| 0
| 0
| 0
| 0.034483
| 0.047619
| 1
| 0.047619
| false
| 0
| 0.285714
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed4408c93538d9f83abe75060897c6705abd216b
| 2,219
|
py
|
Python
|
social_webpy/app.py
|
python-social-auth/social-app-webpy
|
edcfd8dd95c66a3524961e5212e13c9c2e8515a3
|
[
"BSD-3-Clause"
] | 2
|
2017-06-21T15:29:09.000Z
|
2022-01-26T21:12:25.000Z
|
social_webpy/app.py
|
python-social-auth/social-app-webpy
|
edcfd8dd95c66a3524961e5212e13c9c2e8515a3
|
[
"BSD-3-Clause"
] | null | null | null |
social_webpy/app.py
|
python-social-auth/social-app-webpy
|
edcfd8dd95c66a3524961e5212e13c9c2e8515a3
|
[
"BSD-3-Clause"
] | 1
|
2018-10-21T07:33:36.000Z
|
2018-10-21T07:33:36.000Z
|
import web
from social_core.actions import do_auth, do_complete, do_disconnect
from .utils import psa, load_strategy
urls = (
r'/login/(?P<backend>[^/]+)/?', 'auth',
r'/complete/(?P<backend>[^/]+)/?', 'complete',
r'/disconnect/(?P<backend>[^/]+)/?', 'disconnect',
r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', 'disconnect',
)
class BaseViewClass(object):
def __init__(self, *args, **kwargs):
self.session = web.web_session
        method = 'post' if web.ctx.method == 'POST' else 'get'
self.strategy = load_strategy()
self.data = web.input(_method=method)
self.backend = None
self._user = None
super(BaseViewClass, self).__init__(*args, **kwargs)
def get_current_user(self):
        if self._user is None:  # __init__ pre-sets _user, so the old hasattr() check was always true and the lookup never ran
if self.session.get('logged_in'):
self._user = self.strategy.get_user(
self.session.get('user_id')
)
else:
self._user = None
return self._user
def login_user(self, user):
self.session['logged_in'] = True
self.session['user_id'] = user.id
class auth(BaseViewClass):
def GET(self, backend):
return self._auth(backend)
def POST(self, backend):
return self._auth(backend)
@psa('/complete/%(backend)s/')
def _auth(self, backend):
return do_auth(self.backend)
class complete(BaseViewClass):
def GET(self, backend, *args, **kwargs):
return self._complete(backend, *args, **kwargs)
def POST(self, backend, *args, **kwargs):
return self._complete(backend, *args, **kwargs)
@psa('/complete/%(backend)s/')
def _complete(self, backend, *args, **kwargs):
return do_complete(
self.backend,
login=lambda backend, user, social_user: self.login_user(user),
user=self.get_current_user(), *args, **kwargs
)
class disconnect(BaseViewClass):
@psa()
def POST(self, backend, association_id=None):
return do_disconnect(self.backend, self.get_current_user(),
association_id)
app_social = web.application(urls, locals())
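# --- Editor's illustrative sketch (not part of the original file) ---
# web.py applications are normally started with run(); session setup
# (web.web_session) is assumed to be wired elsewhere.
#
# if __name__ == '__main__':
#     app_social.run()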
| 28.818182
| 78
| 0.598918
| 259
| 2,219
| 4.942085
| 0.216216
| 0.094531
| 0.066406
| 0.042188
| 0.222656
| 0.1375
| 0.0875
| 0.0875
| 0.0875
| 0.0875
| 0
| 0
| 0.250113
| 2,219
| 76
| 79
| 29.197368
| 0.769231
| 0
| 0
| 0.142857
| 0
| 0
| 0.121226
| 0.085174
| 0
| 0
| 0
| 0
| 0
| 1
| 0.178571
| false
| 0
| 0.053571
| 0.125
| 0.446429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
ed4409f82d978378f6be973493d164c3f3a747dd
| 2,133
|
py
|
Python
|
stellar/config.py
|
gomyar/stellar
|
b2dfbe136f1540f0ca6ac5779ebaeae996a3b747
|
[
"MIT"
] | null | null | null |
stellar/config.py
|
gomyar/stellar
|
b2dfbe136f1540f0ca6ac5779ebaeae996a3b747
|
[
"MIT"
] | null | null | null |
stellar/config.py
|
gomyar/stellar
|
b2dfbe136f1540f0ca6ac5779ebaeae996a3b747
|
[
"MIT"
] | null | null | null |
import os
import logging
import yaml
from schema import Use, Schema, SchemaError, Optional
class InvalidConfig(Exception):
pass
class MissingConfig(Exception):
pass
default_config = {
'logging': 30,
'migrate_from_0_3_2': True
}
schema = Schema({
'stellar_url': Use(str),
'url': Use(str),
'project_name': Use(str),
'tracked_databases': [Use(str)],
Optional('logging'): int,
Optional('migrate_from_0_3_2'): bool
})
def get_config_path():
    current_directory = os.getcwd()
    while True:
        config_path = os.path.join(current_directory, 'stellar.yaml')
        try:
            with open(config_path, 'rb'):
                return config_path
        except IOError:
            pass
        current_directory = os.path.abspath(
            os.path.join(current_directory, '..')
        )
        if current_directory == '/':
            return None
def load_config():
config = {}
stellar_config_env = os.getenv('STELLAR_CONFIG')
if stellar_config_env:
if os.path.exists(stellar_config_env):
config = yaml.safe_load(open(stellar_config_env))
else:
current_directory = os.getcwd()
while True:
try:
with open(
os.path.join(current_directory, 'stellar.yaml'),
'rb'
) as fp:
config = yaml.safe_load(fp)
break
except IOError:
pass
if current_directory == '/':
break
current_directory = os.path.abspath(
os.path.join(current_directory, '..')
)
if not config:
raise MissingConfig()
for k, v in default_config.items():
if k not in config:
config[k] = v
try:
return schema.validate(config)
except SchemaError as e:
raise InvalidConfig(e)
def save_config(config):
logging.getLogger(__name__).debug('save_config()')
with open(get_config_path(), "w") as fp:
yaml.dump(config, fp)
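# --- Editor's illustrative sketch (not part of the original file) ---
# Typical use of the helpers above, assuming a stellar.yaml exists somewhere
# up the directory tree.
#
# try:
#     config = load_config()
#     print(config['project_name'], config['tracked_databases'])
# except MissingConfig:
#     print('no stellar.yaml found')
# except InvalidConfig as e:
#     print('bad config:', e)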
| 23.43956
| 70
| 0.554149
| 234
| 2,133
| 4.854701
| 0.307692
| 0.15493
| 0.044014
| 0.074824
| 0.310739
| 0.286092
| 0.286092
| 0.253521
| 0.253521
| 0.253521
| 0
| 0.005678
| 0.339428
| 2,133
| 90
| 71
| 23.7
| 0.800568
| 0
| 0
| 0.402778
| 0
| 0
| 0.078293
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0.055556
| 0.055556
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ed44cdd790149a7a7aba7ae668b2598d57504c5a
| 9,404
|
py
|
Python
|
movement_validation/features/feature_processing_options.py
|
eulerkaku/movement_validation
|
af939a42a97c1de889cf13bad0c22a2824d60947
|
[
"MIT"
] | null | null | null |
movement_validation/features/feature_processing_options.py
|
eulerkaku/movement_validation
|
af939a42a97c1de889cf13bad0c22a2824d60947
|
[
"MIT"
] | null | null | null |
movement_validation/features/feature_processing_options.py
|
eulerkaku/movement_validation
|
af939a42a97c1de889cf13bad0c22a2824d60947
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module will hold a class that will be referenced when processing features.
I'd like to move things from "config" into here ...
- @JimHokanson
"""
from __future__ import division
from .. import utils
#Can't do this, would be circular
#from .worm_features import WormFeatures
class FeatureProcessingOptions(object):
def __init__(self,fps):
#The idea with this attribute is that functions will check if they are
#in this list. If they are then they can display some sort of popup that
#clarifies how they are working.
#
        #No functions actually use this yet. It is just a placeholder.
#
#An example of this might be:
# 'morphology.length'
# s
self.functions_to_explain = []
#This indicates that, where possible, code should attempt to replicate
#the errors and inconsistencies present in the way that the Schafer lab
#computed features. This can be useful for ensuring that we are able to
#compute features in the same way that they did.
#
#NOTE: There are a few instances where this is not supported such that
#the behavior will not match even if this value is set to True.
self.mimic_old_behaviour = True
self.locomotion = LocomotionOptions(fps)
self.posture = PostureOptions(fps)
#TODO: Implement this
#This is not yet implemented. The idea is to support not
#computing certain features. We might also allow disabling certain
#groups of feature.
self.features_to_ignore = []
def should_compute_feature(self,feature_name,worm_features):
"""
"""
#TODO: Implement this ...
return True
def disable_contour_features(self):
"""
Contour features:
"""
#see self.features_to_ignore
contour_dependent_features = [\
'morphology.width',
'morphology.area',
'morphology.area_per_length',
'morphology.width_per_length',
'posture.eccentricity']
self.features_to_ignore = list(set(self.features_to_ignore + contour_dependent_features))
def disable_feature_sections(self,section_names):
"""
This can be used to disable processing of features by section (see the
options available below)
Modifies 'features_to_ignore'
Parameters
----------
section_names : list[str]
Options are:
- morphology
- locomotion
- posture
- path
Examples
--------
fpo.disable_feature_sections(['morphology'])
fpo.disable_feature_sections(['morphology','locomotion'])
"""
new_ignores = []
f = IgnorableFeatures()
for section in section_names:
new_ignores.extend(getattr(f,section))
self.features_to_ignore = list(set(self.features_to_ignore + new_ignores))
def __repr__(self):
return utils.print_object(self)
class PostureOptions(object):
def __init__(self,fps):
        self.n_eccentricity_grid_points = 50 # Grid size for estimating eccentricity; this is the
# max # of points that will fill the wide dimension.
# (scalar) The # of points to place in the long dimension. More points
# gives a more accurate estimate of the ellipse but increases
# the calculation time.
#
#Used by: posture_features.get_eccentricity_and_orientation
self.coiling_frame_threshold = round(1/5 * fps) #This is the # of
#frames that an epoch must exceed in order for it to be truly
#considered a coiling event
#Current value translation: 1/5 of a second
#
#Used by: posture_features.get_worm_coils
self.n_eigenworms_use = 6
#The maximum # of available values is 7 although technically there
        #are generally 48 eigenvectors available; we have only precomputed
#7 to use for the projections
#
#Used by:
        self.kink_length_threshold_pct = 1/12 #This is the fraction of the worm
#length that a bend must be in order to be counted. The # of worm
#points (this_value*worm_length_in_samples) is rounded to an integer
#value. The threshold value is inclusive.
#
#Use: posture_features.get_worm_kinks
self.wavelength = PostureWavelengthOptions()
class PostureWavelengthOptions(object):
"""
These options are all used in:
get_amplitude_and_wavelength
"""
def __init__(self):
self.n_points_fft = 512
self.min_dist_peaks = 5 #This value is in samples, not a
#spatial frequency. The spatial frequency sampling also varies by
#the worm length, so this resolution varies on a frame by frame basis.
self.pct_max_cutoff = 0.5
self.pct_cutoff = 2
class LocomotionOptions(object):
def __init__(self,fps):
#locomotion_features.LocomotionVelocity
#-------------------------------------
#Units: seconds
#NOTE: We could get the defaults from the class ...
self.velocity_tip_diff = 0.25
self.velocity_body_diff = 0.5
#locomotion_features.MotionEvents
#--------------------------------------
# Interpolate only this length of NaN run; anything longer is
# probably an omega turn.
# If set to "None", interpolate all lengths (i.e. infinity)
#TODO - Inf would be a better specification
self.motion_codes_longest_nan_run_to_interpolate = None
# These are a percentage of the worm's length
self.motion_codes_speed_threshold_pct = 0.05
self.motion_codes_distance_threshold_pct = 0.05
self.motion_codes_pause_threshold_pct = 0.025
# These are times (s)
self.motion_codes_min_frames_threshold = 0.5
self.motion_codes_max_interframes_threshold = 0.25
#locomotion_bends.LocomotionCrawlingBends
self.crawling_bends = LocomotionCrawlingBends(fps)
self.foraging_bends = LocomotionForagingBends(fps)
self.locomotion_turns = LocomotionTurns(fps)
def __repr__(self):
return utils.print_object(self)
class LocomotionTurns(object):
def __init__(self,fps):
self.max_interpolation_gap_allowed = 9 #frames
self.min_omega_event_length = round(fps/4)
#TODO: There is still a lot to put into here
class LocomotionForagingBends(object):
def __init__(self,fps):
#NOTE: The nose & neck can also be thought of as the head tip
#and head neck
self.min_nose_window_samples = round(0.1 * fps)
self.max_samples_interp_nose = 2*self.min_nose_window_samples - 1
class LocomotionCrawlingBends(object):
def __init__(self,fps):
self.fft_n_samples = 2 ** 14
self.bends_partitions = \
{'head': (5, 10),
'midbody': (22, 27),
'tail': (39, 44)}
self.peak_energy_threshold = 0.5
# max_amplitude_pct_bandwidth - when determining the bandwidth,
# the minimums that are found can't exceed this percentage of the maximum.
# Doing so invalidates the result.
self.max_amplitude_pct_bandwidth = 0.5
self.min_time_for_bend = 0.5
self.max_time_for_bend = 15
#TODO: What are the units on these things ????
#This is a spatial frequency
self.min_frequency = 0.25 * self.max_time_for_bend
#What is the technical max???? 0.5 fps????
self.max_frequency = 0.25 * fps
#This is a processing optimization.
#How far into the maximum peaks should we look ...
#If this value is low, an expensive computation could go faster. If it
#is too low, then we end up rerunning the calculation the whole dataset
#and we end up losing time
self.initial_max_I_pct = 0.5
def __repr__(self):
return utils.print_object(self)
class IgnorableFeatures:
"""
I'm not thrilled with where this is placed, but placing it in WormFeatures
creates a circular dependency
"""
def __init__(self):
temp = ['length','width','area','area_per_length','width_per_length']
self.morphology = ['morphology.' + s for s in temp]
#None of these are implemented ...
temp = ['velocity','motion_events','motion_mode','crawling_bends','foraging_bends','turns']
self.locomotion = ['locomotion.' + s for s in temp]
#locomotion
#crawling_bends: Done
#turns: Done
temp = ['bends','eccentricity', 'amplitude_and_wavelength','kinks','coils','directions','eigen_projection']
self.posture = ['posture.' + s for s in temp]
#None of these are implemented ...
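# --- Editor's illustrative sketch (not part of the original file) ---
# Constructing the options and disabling feature groups; the fps value is
# hypothetical.
#
# fpo = FeatureProcessingOptions(fps=25.0)
# fpo.disable_feature_sections(['morphology'])
# fpo.disable_contour_features()
# print(fpo.features_to_ignore)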
| 34.072464
| 116
| 0.606231
| 1,125
| 9,404
| 4.873778
| 0.322667
| 0.010213
| 0.01605
| 0.018603
| 0.139522
| 0.089732
| 0.0766
| 0.053256
| 0.053256
| 0.030275
| 0
| 0.012403
| 0.314122
| 9,404
| 276
| 117
| 34.072464
| 0.837674
| 0.43205
| 0
| 0.16092
| 0
| 0
| 0.067644
| 0.015456
| 0
| 0
| 0
| 0.01087
| 0
| 1
| 0.16092
| false
| 0
| 0.022989
| 0.034483
| 0.321839
| 0.034483
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed4a99de0c0a371040dd37f7fc6fad45e488b616
| 132
|
py
|
Python
|
6.爬取豆瓣排行榜电影数据(含GUI界面版)/main.py
|
shengqiangzhang/examples-of-web-crawlers
|
89eb6c169b8824a6a9bc78e7a32e064d33560aa7
|
[
"MIT"
] | 12,023
|
2019-03-13T08:53:27.000Z
|
2022-03-31T21:31:15.000Z
|
6.爬取豆瓣排行榜电影数据(含GUI界面版)/main.py
|
shengqiangzhang/examples-of-web-crawlers
|
89eb6c169b8824a6a9bc78e7a32e064d33560aa7
|
[
"MIT"
] | 100
|
2019-03-14T04:09:12.000Z
|
2022-03-22T14:24:11.000Z
|
6.爬取豆瓣排行榜电影数据(含GUI界面版)/main.py
|
shengqiangzhang/examples-of-web-crawlers
|
89eb6c169b8824a6a9bc78e7a32e064d33560aa7
|
[
"MIT"
] | 3,693
|
2019-03-13T08:21:22.000Z
|
2022-03-31T16:07:08.000Z
|
# -*- coding:utf-8 -*-
from uiObject import uiObject
# main入口
if __name__ == '__main__':
ui = uiObject()
ui.ui_process()
| 13.2
| 29
| 0.621212
| 16
| 132
| 4.5625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009709
| 0.219697
| 132
| 10
| 30
| 13.2
| 0.699029
| 0.204545
| 0
| 0
| 0
| 0
| 0.07767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed4ce61eb4af04f3704ae96a5870d43583535a63
| 524
|
py
|
Python
|
photos/models.py
|
eude313/vault
|
d3e24cf01d15de94244b7d2e80316355a0827f74
|
[
"MIT"
] | null | null | null |
photos/models.py
|
eude313/vault
|
d3e24cf01d15de94244b7d2e80316355a0827f74
|
[
"MIT"
] | null | null | null |
photos/models.py
|
eude313/vault
|
d3e24cf01d15de94244b7d2e80316355a0827f74
|
[
"MIT"
] | null | null | null |
from django.db import models
from cloudinary.models import CloudinaryField
# Create your models here.
class Category(models.Model):
name = models.CharField( max_length=200, null=False, blank=False )
def __str__(self):
return self.name
class Photo(models.Model):
category = models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True )
image = CloudinaryField('image', default='')
description = models.TextField()
def __str__(self):
return self.description
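# --- Editor's illustrative sketch (not part of the original file) ---
# Creating a category and an attached photo; requires a configured Django
# project and database, and the field values are hypothetical.
#
# landscapes = Category.objects.create(name='Landscapes')
# photo = Photo.objects.create(category=landscapes, description='Sunset over the bay')
# print(photo)          # __str__ returns the description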
| 29.111111
| 94
| 0.71374
| 64
| 524
| 5.671875
| 0.546875
| 0.077135
| 0.055096
| 0.088154
| 0.110193
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007026
| 0.185115
| 524
| 18
| 95
| 29.111111
| 0.843091
| 0.045802
| 0
| 0.166667
| 0
| 0
| 0.01002
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0.166667
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
ed4da4c3e62ea1a20080eade8fbb9743d55cdd88
| 3,558
|
py
|
Python
|
doc/examples.py
|
Enerccio/mahjong
|
903505a7886c31845dfa6b3f54c936a4feb29e6e
|
[
"MIT"
] | 254
|
2017-09-20T15:02:20.000Z
|
2022-03-28T11:33:28.000Z
|
doc/examples.py
|
Enerccio/mahjong
|
903505a7886c31845dfa6b3f54c936a4feb29e6e
|
[
"MIT"
] | 39
|
2017-09-23T14:28:36.000Z
|
2022-01-06T08:41:57.000Z
|
doc/examples.py
|
Enerccio/mahjong
|
903505a7886c31845dfa6b3f54c936a4feb29e6e
|
[
"MIT"
] | 38
|
2017-10-19T09:06:53.000Z
|
2022-03-15T05:08:22.000Z
|
from mahjong.hand_calculating.hand import HandCalculator
from mahjong.meld import Meld
from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules
from mahjong.shanten import Shanten
from mahjong.tile import TilesConverter
calculator = HandCalculator()
# useful helper
def print_hand_result(hand_result):
print(hand_result.han, hand_result.fu)
print(hand_result.cost['main'])
print(hand_result.yaku)
for fu_item in hand_result.fu_details:
print(fu_item)
print('')
####################################################################
# Tanyao hand by ron #
####################################################################
# we had to use all 14 tiles in that array
tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]
result = calculator.estimate_hand_value(tiles, win_tile)
print_hand_result(result)
####################################################################
# Tanyao hand by tsumo #
####################################################################
result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True))
print_hand_result(result)
####################################################################
# Add open set to hand #
####################################################################
melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))]
result = calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True)))
print_hand_result(result)
####################################################################
# Shanten calculation #
####################################################################
shanten = Shanten()
tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443')
result = shanten.calculate_shanten(tiles)
print(result)
####################################################################
# Kazoe as a sanbaiman #
####################################################################
tiles = TilesConverter.string_to_136_array(man='22244466677788')
win_tile = TilesConverter.string_to_136_array(man='7')[0]
melds = [
Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False)
]
dora_indicators = [
TilesConverter.string_to_136_array(man='1')[0],
TilesConverter.string_to_136_array(man='1')[0],
TilesConverter.string_to_136_array(man='1')[0],
TilesConverter.string_to_136_array(man='1')[0],
]
config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN))
result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config)
print_hand_result(result)
####################################################################
# Change the cost of yaku #
####################################################################
config = HandConfig(is_renhou=True)
# renhou as a yakuman - old style
config.yaku.renhou.han_closed = 13
tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]
result = calculator.estimate_hand_value(tiles, win_tile, config=config)
print_hand_result(result)
| 35.227723
| 133
| 0.537943
| 355
| 3,558
| 5.132394
| 0.261972
| 0.1427
| 0.15697
| 0.164654
| 0.493963
| 0.403952
| 0.384193
| 0.322173
| 0.322173
| 0.236004
| 0
| 0.039509
| 0.153457
| 3,558
| 100
| 134
| 35.58
| 0.565405
| 0.13575
| 0
| 0.282609
| 0
| 0
| 0.032992
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0
| 0.108696
| 0
| 0.130435
| 0.26087
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed5bb200d9597641b3d366c18b6bda01b9a7883d
| 6,119
|
py
|
Python
|
src/TF-gui/tftrain.py
|
jeetsagar/turbojet
|
9b17edde0a7e01d0fa320261fbc2734ce53577d2
|
[
"MIT"
] | null | null | null |
src/TF-gui/tftrain.py
|
jeetsagar/turbojet
|
9b17edde0a7e01d0fa320261fbc2734ce53577d2
|
[
"MIT"
] | null | null | null |
src/TF-gui/tftrain.py
|
jeetsagar/turbojet
|
9b17edde0a7e01d0fa320261fbc2734ce53577d2
|
[
"MIT"
] | 2
|
2021-05-20T05:47:59.000Z
|
2021-08-24T07:44:37.000Z
|
#!python3
import os
import pandas as pd
import tensorflow as tf
from tensorflow.keras import layers
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
# gpu_devices = tf.config.experimental.list_physical_devices("GPU")
# for device in gpu_devices:
# tf.config.experimental.set_memory_growth(device, True)
def trainModel(data_in, params_in):
data_in = data_in.take(2048)
data_in = data_in.shuffle(24)
data_in = data_in.batch(1024)
arch = params_in["Architecture"]
dropout = params_in["Dropout"]
lr = params_in["LearningRate"]
attrs = params_in["Attrs"]
epochs = params_in["Epochs"]
if arch == "BaseCNN":
if params_in["BatchNorm"]:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.BatchNormalization(),
layers.Flatten(),
layers.Dense(50, "relu"),
layers.Dense(1)
])
else:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Flatten(),
layers.Dense(50, "relu"),
layers.Dense(1)
])
elif arch == "CNN-LSTM":
if params_in["BatchNorm"]:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.BatchNormalization(),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=False),
layers.Dense(50, "relu"),
layers.Dense(1)
])
else:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=False),
layers.Dense(50, "relu"),
layers.Dense(1)
])
elif arch == "CNN-2LSTM":
if params_in["BatchNorm"]:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.BatchNormalization(),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=True),
layers.LSTM(30, return_sequences=False),
layers.Dense(1)
])
else:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=True),
layers.LSTM(30, return_sequences=False),
layers.Dense(1)
])
model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True))
filepath = "./checkpoints/Model_in-" + arch + str(attrs) + ".h5"
losses = []
class CustomModelCheckPoint(tf.keras.callbacks.Callback):
def __init__(self, **kargs):
super(CustomModelCheckPoint, self).__init__(**kargs)
            self.epoch_loss = {}  # loss at a given epoch (the original comment said "accuracy", but the dict stores loss)
def on_epoch_begin(self, epoch, logs={}):
            # Things done at the beginning of an epoch.
return
def on_epoch_end(self, epoch, logs={}):
            # things done at the end of the epoch
self.epoch_loss[epoch] = logs.get("loss")
losses.append(self.epoch_loss[epoch])
if params_in["ResumeTraining"]:
model.load_weights(filepath)
checkpoint2 = CustomModelCheckPoint()
    checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbose=0, save_best_only=True,
save_freq='epoch')
model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2])
df_loss = pd.DataFrame()
df_loss["Epochs"] = list(range(1, epochs + 1))
df_loss["Loss"] = losses
df_loss.to_csv("./losses/lossTrend.csv", index=False)
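# --- Editor's illustrative sketch (not part of the original file) ---
# trainModel() expects an unbatched tf.data.Dataset of (window, label) pairs
# shaped to match input_shape=(1, 50, attrs), plus a params dict like the one
# below; every value here is hypothetical.
#
# import numpy as np
# windows = np.random.rand(2048, 1, 50, 14).astype("float32")
# labels = np.random.rand(2048, 1).astype("float32")
# ds = tf.data.Dataset.from_tensor_slices((windows, labels))
# params = {"Architecture": "BaseCNN", "Dropout": 0.2, "LearningRate": 1e-3,
#           "Attrs": 14, "Epochs": 5, "BatchNorm": False, "ResumeTraining": False}
# trainModel(ds, params)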
| 42.493056
| 120
| 0.57346
| 664
| 6,119
| 5.161145
| 0.201807
| 0.063029
| 0.099796
| 0.094543
| 0.674934
| 0.657426
| 0.642836
| 0.642836
| 0.642836
| 0.638751
| 0
| 0.032579
| 0.292695
| 6,119
| 143
| 121
| 42.79021
| 0.759242
| 0.041183
| 0
| 0.669492
| 0
| 0
| 0.061263
| 0.007679
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033898
| false
| 0
| 0.033898
| 0.008475
| 0.084746
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed5bcaf7cb360ac7f0af74528df0eb589224f1a5
| 5,434
|
py
|
Python
|
library/kong_api.py
|
sebastienc/ansible-kong-module
|
c1e7b471a517d1ec99c5629f3729ebc34088bd64
|
[
"MIT"
] | 34
|
2016-03-09T17:10:52.000Z
|
2019-12-25T08:31:49.000Z
|
library/kong_api.py
|
sebastienc/ansible-kong-module
|
c1e7b471a517d1ec99c5629f3729ebc34088bd64
|
[
"MIT"
] | 6
|
2016-05-16T14:09:05.000Z
|
2018-07-23T21:09:33.000Z
|
library/kong_api.py
|
sebastienc/ansible-kong-module
|
c1e7b471a517d1ec99c5629f3729ebc34088bd64
|
[
"MIT"
] | 23
|
2016-02-17T12:18:16.000Z
|
2021-05-06T09:39:35.000Z
|
#!/usr/bin/python
DOCUMENTATION = '''
---
module: kong
short_description: Configure a Kong API Gateway
'''
EXAMPLES = '''
- name: Register a site
kong:
kong_admin_uri: http://127.0.0.1:8001/apis/
name: "Mockbin"
    upstream_url: "http://mockbin.com"
request_host: "mockbin.com"
state: present
- name: Delete a site
kong:
kong_admin_uri: http://127.0.0.1:8001/apis/
name: "Mockbin"
state: absent
'''
import json, requests, os
class KongAPI:
def __init__(self, base_url, auth_username=None, auth_password=None):
self.base_url = base_url
if auth_username is not None and auth_password is not None:
self.auth = (auth_username, auth_password)
else:
self.auth = None
def __url(self, path):
return "{}{}" . format (self.base_url, path)
def _api_exists(self, name, api_list):
for api in api_list:
if name == api.get("name", None):
return True
return False
def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False):
method = "post"
url = self.__url("/apis/")
api_list = self.list().json().get("data", [])
api_exists = self._api_exists(name, api_list)
if api_exists:
method = "patch"
url = "{}{}" . format (url, name)
data = {
"name": name,
"upstream_url": upstream_url,
"strip_request_path": strip_request_path,
"preserve_host": preserve_host
}
if request_host is not None:
data['request_host'] = request_host
if request_path is not None:
data['request_path'] = request_path
return getattr(requests, method)(url, data, auth=self.auth)
def list(self):
url = self.__url("/apis")
return requests.get(url, auth=self.auth)
def info(self, id):
url = self.__url("/apis/{}" . format (id))
return requests.get(url, auth=self.auth)
def delete_by_name(self, name):
info = self.info(name)
id = info.json().get("id")
return self.delete(id)
def delete(self, id):
path = "/apis/{}" . format (id)
url = self.__url(path)
return requests.delete(url, auth=self.auth)
class ModuleHelper:
def __init__(self, fields):
self.fields = fields
def get_module(self):
args = dict(
kong_admin_uri = dict(required=False, type='str'),
kong_admin_username = dict(required=False, type='str'),
kong_admin_password = dict(required=False, type='str'),
name = dict(required=False, type='str'),
upstream_url = dict(required=False, type='str'),
request_host = dict(required=False, type='str'),
request_path = dict(required=False, type='str'),
strip_request_path = dict(required=False, default=False, type='bool'),
preserve_host = dict(required=False, default=False, type='bool'),
state = dict(required=False, default="present", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'),
)
return AnsibleModule(argument_spec=args,supports_check_mode=False)
def prepare_inputs(self, module):
url = module.params['kong_admin_uri']
auth_user = module.params['kong_admin_username']
auth_password = module.params['kong_admin_password']
state = module.params['state']
data = {}
for field in self.fields:
value = module.params.get(field, None)
if value is not None:
data[field] = value
return (url, data, state, auth_user, auth_password)
def get_response(self, response, state):
if state == "present":
meta = response.json()
has_changed = response.status_code in [201, 200]
if state == "absent":
meta = {}
has_changed = response.status_code == 204
if state == "list":
meta = response.json()
has_changed = False
return (has_changed, meta)
def main():
fields = [
'name',
'upstream_url',
'request_host',
'request_path',
'strip_request_path',
'preserve_host'
]
helper = ModuleHelper(fields)
global module # might not need this
module = helper.get_module()
base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module)
api = KongAPI(base_url, auth_user, auth_password)
if state == "present":
response = api.add_or_update(**data)
if state == "absent":
response = api.delete_by_name(data.get("name"))
if state == "list":
response = api.list()
if response.status_code == 401:
module.fail_json(msg="Please specify kong_admin_username and kong_admin_password", meta=response.json())
elif response.status_code == 403:
module.fail_json(msg="Please check kong_admin_username and kong_admin_password", meta=response.json())
else:
has_changed, meta = helper.get_response(response, state)
module.exit_json(changed=has_changed, meta=meta)
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
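# --- Editor's illustrative sketch (not part of the original file) ---
# KongAPI can also be driven directly, outside Ansible; the admin URL and
# names are placeholders.
#
# api = KongAPI('http://127.0.0.1:8001')
# resp = api.add_or_update(name='Mockbin', upstream_url='http://mockbin.com',
#                          request_host='mockbin.com')
# print(resp.status_code, resp.json())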
| 30.188889
| 133
| 0.597902
| 662
| 5,434
| 4.685801
| 0.193353
| 0.034816
| 0.054803
| 0.047389
| 0.303997
| 0.199871
| 0.179884
| 0.088975
| 0.066409
| 0.066409
| 0
| 0.008947
| 0.280088
| 5,434
| 179
| 134
| 30.357542
| 0.783998
| 0.006625
| 0
| 0.145985
| 0
| 0
| 0.159036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094891
| false
| 0.072993
| 0.021898
| 0.007299
| 0.211679
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ed5e905c814c4d72273c16c39c47e06ae62fc1f0
| 897
|
gyp
|
Python
|
tools/android/android_tools.gyp
|
SlimKatLegacy/android_external_chromium_org
|
ee480ef5039d7c561fc66ccf52169ead186f1bea
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2015-03-04T02:36:53.000Z
|
2016-06-25T11:22:17.000Z
|
tools/android/android_tools.gyp
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/android/android_tools.gyp
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 4
|
2015-02-09T08:49:30.000Z
|
2017-08-26T02:03:34.000Z
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
# Intermediate target grouping the android tools needed to run native
# unittests and instrumentation test apks.
{
'target_name': 'android_tools',
'type': 'none',
'dependencies': [
'adb_reboot/adb_reboot.gyp:adb_reboot',
'forwarder2/forwarder.gyp:forwarder2',
'md5sum/md5sum.gyp:md5sum',
'purge_ashmem/purge_ashmem.gyp:purge_ashmem',
],
},
{
'target_name': 'memdump',
'type': 'none',
'dependencies': [
'memdump/memdump.gyp:memdump',
],
},
{
'target_name': 'memconsumer',
'type': 'none',
'dependencies': [
'memconsumer/memconsumer.gyp:memconsumer',
],
},
],
}
| 25.628571
| 73
| 0.596433
| 94
| 897
| 5.585106
| 0.595745
| 0.057143
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013761
| 0.270903
| 897
| 34
| 74
| 26.382353
| 0.788991
| 0.298774
| 0
| 0.357143
| 0
| 0
| 0.536116
| 0.325843
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed693e39d7414ae26d14dc6568bc549d2c30f321
| 1,452
|
py
|
Python
|
DD/Terrain.py
|
CodingBullywug/DDreshape
|
393e5ea336eb6cb78f31345731ccf52baf19bfac
|
[
"MIT"
] | 2
|
2020-04-13T04:47:26.000Z
|
2022-02-19T06:10:04.000Z
|
DD/Terrain.py
|
CodingBullywug/DDreshape
|
393e5ea336eb6cb78f31345731ccf52baf19bfac
|
[
"MIT"
] | null | null | null |
DD/Terrain.py
|
CodingBullywug/DDreshape
|
393e5ea336eb6cb78f31345731ccf52baf19bfac
|
[
"MIT"
] | 1
|
2020-04-13T04:47:30.000Z
|
2020-04-13T04:47:30.000Z
|
from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray
from DD.Entity import Entity
import numpy as np
class Terrain(Entity):
def __init__(self, json, width, height, scale=4, terrain_types=4):
super(Terrain, self).__init__(json)
self._scale = scale
self.terrain_types = terrain_types
self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C')
def get_json(self):
json = self._json
json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C'))
return json
def pad(self, top, bottom, left, right):
self.splat = np.pad(self.splat,
((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)),
mode='edge')
def crop(self, top, bottom, left, right):
self.splat = self._crop_map_safe(self.splat, top, bottom, left, right, self._scale)
def fliplr(self, width):
self.splat = np.fliplr(self.splat)
def flipud(self, height):
self.splat = np.flipud(self.splat)
def rot90(self, width, height):
self.splat = self._rot90_map(self.splat)
def rot180(self, width, height):
self.splat = self._rot180_map(self.splat)
def rot270(self, width, height):
self.splat = self._rot270_map(self.splat)
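# --- Editor's illustrative sketch (not part of the original file) ---
# Round-tripping a terrain splat map; tile_json and the dimensions are
# hypothetical.
#
# terrain = Terrain(json=tile_json, width=32, height=32)
# terrain.pad(top=1, bottom=1, left=0, right=0)   # grow the splat by one tile vertically
# terrain.rot90(width=32, height=34)
# out = terrain.get_json()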
| 37.230769
| 144
| 0.644628
| 188
| 1,452
| 4.808511
| 0.244681
| 0.169248
| 0.057522
| 0.059735
| 0.185841
| 0.161504
| 0.068584
| 0
| 0
| 0
| 0
| 0.021409
| 0.227961
| 1,452
| 38
| 145
| 38.210526
| 0.785013
| 0
| 0
| 0
| 0
| 0
| 0.011019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.310345
| false
| 0
| 0.103448
| 0
| 0.482759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed6aff1082796c2046965ddce3d39f2087944e89
| 925
|
py
|
Python
|
setup.py
|
marcus-luck/zohoreader
|
e832f076a8a87bf27607980fb85a1d2bc8339743
|
[
"MIT"
] | 1
|
2020-11-11T02:19:50.000Z
|
2020-11-11T02:19:50.000Z
|
setup.py
|
marcus-luck/zohoreader
|
e832f076a8a87bf27607980fb85a1d2bc8339743
|
[
"MIT"
] | null | null | null |
setup.py
|
marcus-luck/zohoreader
|
e832f076a8a87bf27607980fb85a1d2bc8339743
|
[
"MIT"
] | null | null | null |
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='zohoreader',
version='0.1',
description='A simple reader for zoho projects API to get all projects, users and timereports',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
],
keywords='zoho, API, zoho project',
url='https://github.com/marcus-luck/zohoreader',
author='Marcus Luck',
author_email='[email protected]',
license='MIT',
packages=['zohoreader'],
zip_safe=False,
install_requires=[
'requests>=2.12.4',
'python-dateutil>=2.7.2'
],
test_suite='nose.collector',
tests_require=['nose', 'nose-cover3'],
include_package_data=True
)
| 28.030303
| 101
| 0.596757
| 106
| 925
| 5.132075
| 0.754717
| 0.055147
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019034
| 0.261622
| 925
| 32
| 102
| 28.90625
| 0.777452
| 0
| 0
| 0.071429
| 0
| 0
| 0.418378
| 0.048649
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| true
| 0
| 0.035714
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed6c19de3061a6952b4f83f10500239e87852cc5
| 2,883
|
py
|
Python
|
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
|
emmamcbryde/AuTuMN-1
|
b1e7de15ac6ef6bed95a80efab17f0780ec9ff6f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
|
emmamcbryde/AuTuMN-1
|
b1e7de15ac6ef6bed95a80efab17f0780ec9ff6f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
|
emmamcbryde/AuTuMN-1
|
b1e7de15ac6ef6bed95a80efab17f0780ec9ff6f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
import numpy as np
from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning
from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \
use_tuned_proposal_sds
from autumn.calibration import Calibration
from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior
from autumn.calibration.targets import (
NormalTarget,
get_dispersion_priors_for_gaussian_targets,
)
from autumn.models.covid_19 import base_params, build_model
from autumn.settings import Region, Models
from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts
# Load and configure model parameters.
default_path = build_rel_path("params/default.yml")
#scenario_paths = [build_rel_path(f"params/scenario-{i}.yml") for i in range(7, 9)]
mle_path = build_rel_path("params/mle-params.yml")
baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True)
all_scenario_dicts = get_all_scenario_dicts("LKA")
#scenario_params = [baseline_params.update(p) for p in scenario_paths]
scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts]
param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params)
ts_set = load_timeseries(build_rel_path("timeseries.json"))
notifications_ts = ts_set["notifications"].rolling(7).mean().loc[350::7]
death_ts = ts_set["infection_deaths"].loc[350:]
targets = [
NormalTarget(notifications_ts),
NormalTarget(death_ts),
]
priors = [
# Dispersion parameters based on targets
    *get_dispersion_priors_for_gaussian_targets(targets),
# Regional parameters
UniformPrior("contact_rate", [0.024, 0.027]),
UniformPrior("infectious_seed", [275.0, 450.0]),
# Detection
UniformPrior("testing_to_detection.assumed_cdr_parameter", [0.009, 0.025]),
UniformPrior("infection_fatality.multiplier", [0.09, 0.13]),
#VoC
UniformPrior("voc_emergence.alpha_beta.start_time", [370, 410]),
UniformPrior("voc_emergence.alpha_beta.contact_rate_multiplier", [3.2, 4.5]),
UniformPrior("voc_emergence.delta.start_time", [475, 530]),
UniformPrior("voc_emergence.delta.contact_rate_multiplier", [8.5, 11.5]),
]
# Load proposal sds from yml file
# use_tuned_proposal_sds(priors, build_rel_path("proposal_sds.yml"))
calibration = Calibration(priors, targets)
# FIXME: Replace with flexible Python plot request API.
import json
plot_spec_filepath = build_rel_path("timeseries.json")
with open(plot_spec_filepath) as f:
plot_spec = json.load(f)
project = Project(
Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec
)
#perform_all_params_proposal_tuning(project, calibration, priors, n_points=50, relative_likelihood_reduction=0.2)
| 43.029851
| 123
| 0.794658
| 401
| 2,883
| 5.399002
| 0.351621
| 0.036952
| 0.038799
| 0.030485
| 0.213395
| 0.06097
| 0.04388
| 0.04388
| 0
| 0
| 0
| 0.027413
| 0.10163
| 2,883
| 67
| 124
| 43.029851
| 0.808494
| 0.181755
| 0
| 0.043478
| 0
| 0
| 0.151193
| 0.105622
| 0
| 0
| 0
| 0.014925
| 0
| 1
| 0
| false
| 0
| 0.217391
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed6c49af1afdf5e937dac3ecb68b0de9cb7816d4
| 11,421
|
py
|
Python
|
selfdrive/sensord/rawgps/structs.py
|
TC921/openpilot
|
d5d91e6e3be02e2525ed8d6137e5fdca2b81657c
|
[
"MIT"
] | null | null | null |
selfdrive/sensord/rawgps/structs.py
|
TC921/openpilot
|
d5d91e6e3be02e2525ed8d6137e5fdca2b81657c
|
[
"MIT"
] | null | null | null |
selfdrive/sensord/rawgps/structs.py
|
TC921/openpilot
|
d5d91e6e3be02e2525ed8d6137e5fdca2b81657c
|
[
"MIT"
] | null | null | null |
from struct import unpack_from, calcsize
LOG_GNSS_POSITION_REPORT = 0x1476
LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477
LOG_GNSS_CLOCK_REPORT = 0x1478
LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480
LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756
LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886
LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE
LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1
LOG_GNSS_ME_DPO_STATUS = 0x1838
LOG_GNSS_CD_DB_REPORT = 0x147B
LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E
LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488
LOG_GNSS_CONFIGURATION_STATE = 0x1516
glonass_measurement_report = """
uint8_t version;
uint32_t f_count;
uint8_t glonass_cycle_number;
uint16_t glonass_number_of_days;
uint32_t milliseconds;
float time_bias;
float clock_time_uncertainty;
float clock_frequency_bias;
float clock_frequency_uncertainty;
uint8_t sv_count;
"""
glonass_measurement_report_sv = """
uint8_t sv_id;
int8_t frequency_index;
uint8_t observation_state; // SVObservationStates
uint8_t observations;
uint8_t good_observations;
uint8_t hemming_error_count;
uint8_t filter_stages;
uint16_t carrier_noise;
int16_t latency;
uint8_t predetect_interval;
uint16_t postdetections;
uint32_t unfiltered_measurement_integral;
float unfiltered_measurement_fraction;
float unfiltered_time_uncertainty;
float unfiltered_speed;
float unfiltered_speed_uncertainty;
uint32_t measurement_status;
uint8_t misc_status;
uint32_t multipath_estimate;
float azimuth;
float elevation;
int32_t carrier_phase_cycles_integral;
uint16_t carrier_phase_cycles_fraction;
float fine_speed;
float fine_speed_uncertainty;
uint8_t cycle_slip_count;
uint32_t pad;
"""
gps_measurement_report = """
uint8_t version;
uint32_t f_count;
uint16_t week;
uint32_t milliseconds;
float time_bias;
float clock_time_uncertainty;
float clock_frequency_bias;
float clock_frequency_uncertainty;
uint8_t sv_count;
"""
gps_measurement_report_sv = """
uint8_t sv_id;
uint8_t observation_state; // SVObservationStates
uint8_t observations;
uint8_t good_observations;
uint16_t parity_error_count;
uint8_t filter_stages;
uint16_t carrier_noise;
int16_t latency;
uint8_t predetect_interval;
uint16_t postdetections;
uint32_t unfiltered_measurement_integral;
float unfiltered_measurement_fraction;
float unfiltered_time_uncertainty;
float unfiltered_speed;
float unfiltered_speed_uncertainty;
uint32_t measurement_status;
uint8_t misc_status;
uint32_t multipath_estimate;
float azimuth;
float elevation;
int32_t carrier_phase_cycles_integral;
uint16_t carrier_phase_cycles_fraction;
float fine_speed;
float fine_speed_uncertainty;
uint8_t cycle_slip_count;
uint32_t pad;
"""
position_report = """
uint8 u_Version; /* Version number of DM log */
uint32 q_Fcount; /* Local millisecond counter */
uint8 u_PosSource; /* Source of position information */ /* 0: None 1: Weighted least-squares 2: Kalman filter 3: Externally injected 4: Internal database */
uint32 q_Reserved1; /* Reserved memory field */
uint16 w_PosVelFlag; /* Position velocity bit field: (see DM log 0x1476 documentation) */
uint32 q_PosVelFlag2; /* Position velocity 2 bit field: (see DM log 0x1476 documentation) */
uint8 u_FailureCode; /* Failure code: (see DM log 0x1476 documentation) */
uint16 w_FixEvents; /* Fix events bit field: (see DM log 0x1476 documentation) */
uint32 _fake_align_week_number;
uint16 w_GpsWeekNumber; /* GPS week number of position */
uint32 q_GpsFixTimeMs; /* GPS fix time of week of in milliseconds */
uint8 u_GloNumFourYear; /* Number of Glonass four year cycles */
uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four year cycle */
uint32 q_GloFixTimeMs; /* Glonass fix time of day in milliseconds */
uint32 q_PosCount; /* Integer count of the number of unique positions reported */
uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position in radians */
uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */
uint32 q_FltHeadingRad; /* User heading in radians */
uint32 q_FltHeadingUncRad; /* User heading uncertainty in radians */
uint32 q_FltVelEnuMps[3]; /* User velocity in east, north, up coordinate frame. In meters per second. */
uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north, up components of user velocity */
uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters */
uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock bias in meters */
uint32 q_FltGGTBMeters; /* GPS to Glonass time bias in meters */
uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias in meters */
uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias in meters */
uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias in meters */
uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias in meters */
uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias in meters */
uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltSftOffsetSec; /* SFT offset as computed by WLS in seconds */
uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset in seconds */
uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias) in meters per second */
uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift in meters per second */
uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in meters */
uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid altitude in meters */
uint32 align_Flt[14];
  uint32 q_FltPdop;	/* 3D position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltHdop; /* Horizontal position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltVdop; /* Vertical position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint8 u_EllipseConfidence; /* Statistical measure of the confidence (percentage) associated with the uncertainty ellipse values */
uint32 q_FltEllipseAngle; /* Angle of semimajor axis with respect to true North, with increasing angles moving clockwise from North. In units of degrees. */
uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position height-above-ellipsoid altitude in meters */
uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not set 1: Very Low 2: Low 3: Medium 4: High */
uint8 u_VerticalReliability; /* Vertical position reliability */
uint16 w_Reserved2; /* Reserved memory field */
uint32 q_FltGnssHeadingRad; /* User heading in radians derived from GNSS only solution */
uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived from GNSS only solution */
  uint32 q_SensorDataUsageMask;	/* Denotes which additional sensor data were used to compute this position fix. BIT[0] 0x00000001 - Accelerometer BIT[1] 0x00000002 - Gyro 0x0000FFFC - Reserved. A bit set to 1 indicates that certain fields as defined by the SENSOR_AIDING_MASK were aided with sensor data */
  uint32 q_SensorAidMask;	/* Denotes which component of the position report was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK. BIT[0] 0x00000001 - Heading aided with sensor data BIT[1] 0x00000002 - Speed aided with sensor data BIT[2] 0x00000004 - Position aided with sensor data BIT[3] 0x00000008 - Velocity aided with sensor data 0xFFFFFFF0 - Reserved */
uint8 u_NumGpsSvsUsed; /* The number of GPS SVs used in the fix */
uint8 u_TotalGpsSvs; /* Total number of GPS SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumGloSvsUsed; /* The number of Glonass SVs used in the fix */
uint8 u_TotalGloSvs; /* Total number of Glonass SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs used in the fix */
uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs detected by searcher, including ones not used in position calculation */
"""
def name_to_camelcase(nam):
ret = []
i = 0
while i < len(nam):
if nam[i] == "_":
ret.append(nam[i+1].upper())
i += 2
else:
ret.append(nam[i])
i += 1
return ''.join(ret)
def parse_struct(ss):
st = "<"
nams = []
for l in ss.strip().split("\n"):
typ, nam = l.split(";")[0].split()
#print(typ, nam)
if typ == "float" or '_Flt' in nam:
st += "f"
elif typ == "double" or '_Dbl' in nam:
st += "d"
elif typ in ["uint8", "uint8_t"]:
st += "B"
elif typ in ["int8", "int8_t"]:
st += "b"
elif typ in ["uint32", "uint32_t"]:
st += "I"
elif typ in ["int32", "int32_t"]:
st += "i"
elif typ in ["uint16", "uint16_t"]:
st += "H"
elif typ in ["int16", "int16_t"]:
st += "h"
elif typ == "uint64":
st += "Q"
else:
print("unknown type", typ)
assert False
if '[' in nam:
cnt = int(nam.split("[")[1].split("]")[0])
st += st[-1]*(cnt-1)
for i in range(cnt):
nams.append("%s[%d]" % (nam.split("[")[0], i))
else:
nams.append(nam)
return st, nams
def dict_unpacker(ss, camelcase = False):
st, nams = parse_struct(ss)
if camelcase:
nams = [name_to_camelcase(x) for x in nams]
sz = calcsize(st)
return lambda x: dict(zip(nams, unpack_from(st, x))), sz
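# Minimal usage sketch (added for illustration, not part of the original
# module): build an unpacker for the GLONASS measurement-report header and
# decode a synthetic buffer packed with the same "<BIBHIffffB" layout.
if __name__ == "__main__":
  from struct import pack
  unpack_glonass, glonass_size = dict_unpacker(glonass_measurement_report, camelcase=True)
  sample = pack("<BIBHIffffB", 1, 1000, 5, 812, 43200000, 0.0, 1.5, 0.0, 0.1, 4)
  assert len(sample) == glonass_size
  print(unpack_glonass(sample))  # {'version': 1, 'fCount': 1000, ...}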
| 50.76
| 403
| 0.698976
| 1,487
| 11,421
| 5.174849
| 0.246133
| 0.040026
| 0.02872
| 0.02924
| 0.488369
| 0.446524
| 0.424691
| 0.382326
| 0.369591
| 0.312411
| 0
| 0.047333
| 0.228614
| 11,421
| 224
| 404
| 50.986607
| 0.826107
| 0.001313
| 0
| 0.349057
| 0
| 0.051887
| 0.832968
| 0.108637
| 0
| 0
| 0.015958
| 0
| 0.004717
| 1
| 0.014151
| false
| 0
| 0.004717
| 0
| 0.033019
| 0.004717
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed6e652c3847138189ca7b951889b9b3a32aa8ce
| 1,702
|
py
|
Python
|
jassen/django/project/project/urls.py
|
cabilangan112/intern-drf-blog
|
b2d6c7a4af1316b2c7ce38547bd9df99b4f3e8b9
|
[
"MIT"
] | null | null | null |
jassen/django/project/project/urls.py
|
cabilangan112/intern-drf-blog
|
b2d6c7a4af1316b2c7ce38547bd9df99b4f3e8b9
|
[
"MIT"
] | null | null | null |
jassen/django/project/project/urls.py
|
cabilangan112/intern-drf-blog
|
b2d6c7a4af1316b2c7ce38547bd9df99b4f3e8b9
|
[
"MIT"
] | null | null | null |
"""project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path
from rest_framework import routers
from blog import views
from blog.views import PostViewSet, CommentViewSet, CategoryViewSet, TagViewSet, DraftViewSet, HideViewSet
router = routers.DefaultRouter()
router.register(r'hide', HideViewSet, base_name='hiddinn')
router.register(r'draft', DraftViewSet, base_name='draft')
router.register(r'post', PostViewSet, base_name='post')
router.register(r'comment', CommentViewSet, base_name='comment')
router.register(r'tags', TagViewSet, base_name='tags')
router.register(r'category', CategoryViewSet, base_name='category')
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
urlpatterns.extend(
static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) +
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
)
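# Hypothetical usage note (not in the original file): DefaultRouter derives
# reversible URL names from each base_name, e.g.
#   from django.urls import reverse
#   reverse('post-list')              # -> '/post/'
#   reverse('post-detail', args=[1])  # -> '/post/1/'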
| 37.822222
| 101
| 0.756757
| 239
| 1,702
| 5.313808
| 0.343096
| 0.047244
| 0.070866
| 0.018898
| 0.092126
| 0.092126
| 0.059055
| 0
| 0
| 0
| 0
| 0.005348
| 0.121034
| 1,702
| 45
| 102
| 37.822222
| 0.843583
| 0.36604
| 0
| 0
| 0
| 0
| 0.10904
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
ed6ff0df42bec5dfbd4d71634bb7ab44a9c003d2
| 9,473
|
py
|
Python
|
django_town/rest_swagger/views.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
django_town/rest_swagger/views.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
django_town/rest_swagger/views.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
from django_town.rest import RestApiView, rest_api_manager
from django_town.http import http_json_response
from django_town.cache.utlis import SimpleCache
from django_town.oauth2.swagger import swagger_authorizations_data
from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly
from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly
class ApiDocsView(RestApiView):
def read(self, request, api_version):
def load_cache(api_version="alpha"):
manager = rest_api_manager(api_version)
ret = {'title': manager.name,
'description': manager.description,
'apiVersion': manager.api_version, 'swaggerVersion': "1.2", 'basePath': manager.base_url,
'resourcePath': manager.base_url, 'info': manager.info,
'authorizations': swagger_authorizations_data()}
apis = []
models = {
"Error": {
"id": "Error",
"required": ['error'],
"properties": {
"error": {
"type": "string"
},
"field": {
"type": "string"
},
"message": {
"type": "string"
},
"resource": {
"type": "string"
}
}
}
}
for view_cls in manager.api_list:
operations = []
global_params = []
path = view_cls.path()
if path == "":
continue
if '{}' in path:
path = path.replace('{}', '{pk}')
global_params.append(
{
"paramType": "path",
"name": 'pk',
"description": 'primary key for object',
"dataType": 'integer',
"format": 'int64',
"required": True,
}
)
responseMessages = [
{
'code': 404,
"message": "not_found",
"responseModel": "Error"
},
{
'code': 500,
"message": "internal_error",
"responseModel": "Error"
},
{
                    'code': 405,
"message": "method_not_allowed",
"responseModel": "Error"
},
{
'code': 409,
"message": "conflict",
"responseModel": "Error"
},
{
'code': 403,
"message": "forbidden",
"responseModel": "Error"
},
{
'code': 401,
"message": "permission_denied",
"responseModel": "Error"
},
{
'code': 401,
"message": "unauthorized",
"responseModel": "Error"
},
{
'code': 400,
"message": "form_invalid",
"responseModel": "Error"
},
{
'code': 400,
"message": "form_required",
"responseModel": "Error"
},
{
'code': 400,
"message": "bad_request",
"responseModel": "Error"
},
]
current_api = {
'path': path,
'description': view_cls.__doc__,
}
if 'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'):
create_op = {
'method': 'POST',
'parameters': global_params,
'responseMessages': responseMessages,
'nickname': 'create ' + path,
}
operations.append(create_op)
if 'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'):
op = {
'method': 'GET',
'responseMessages': responseMessages,
'nickname': 'read ' + path
}
params = global_params.copy()
for each_permission in view_cls.permission_classes:
if issubclass(each_permission, OAuth2Authenticated):
params.append(
{
"paramType": "query",
"name": 'access_token',
"dataType": 'string',
"required": True,
}
)
if hasattr(view_cls, 'read_safe_parameters'):
for each in view_cls.read_safe_parameters:
if isinstance(each, tuple):
if each[1] == int:
params.append(
{
"paramType": "query",
"name": each[0],
"dataType": 'int',
"format": 'int64',
"required": True,
}
)
elif each[1] == float:
params.append(
{
"paramType": "query",
"name": each[0],
"dataType": 'float',
"format": 'float',
"required": True,
}
)
else:
params.append(
{
"paramType": "query",
"name": each[0],
"dataType": 'string',
"required": True,
}
)
else:
params.append(
{
"paramType": "query",
"name": each,
"dataType": 'string',
"required": True,
}
)
op['parameters'] = params
operations.append(op)
if 'update' in view_cls.crud_method_names and hasattr(view_cls, 'update'):
                op = {
                    'method': 'PUT',
                    'parameters': global_params,
                    'responseMessages': responseMessages,
                    'errorResponses': [],
                    'nickname': 'update ' + path,
                }
operations.append(op)
if 'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'):
                op = {
                    'method': 'DELETE',
                    'parameters': global_params,
                    'responseMessages': responseMessages,
                    'errorResponses': [],
                    'nickname': 'delete ' + path,
                }
operations.append(op)
current_api['operations'] = operations
apis.append(current_api)
ret['apis'] = apis
ret["models"] = models
return ret
ret = SimpleCache(key_format="api-doc:%(api_version)s", duration=60 * 60 * 24,
load_callback=load_cache).get(api_version=api_version)
response = http_json_response(ret)
response["Access-Control-Allow-Origin"] = "*"
response["Access-Control-Allow-Methods"] = "GET"
response["Access-Control-Max-Age"] = "1000"
response["Access-Control-Allow-Headers"] = "*"
return response
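# Shape note (derived from load_cache above, not an official schema): the
# cached value is a Swagger 1.2 resource listing, roughly
#   {"swaggerVersion": "1.2", "basePath": ..., "info": ...,
#    "apis": [{"path": ..., "description": ..., "operations": [...]}],
#    "models": {"Error": ...}}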
| 43.059091
| 108
| 0.331468
| 525
| 9,473
| 5.819048
| 0.28
| 0.032079
| 0.064812
| 0.042553
| 0.304092
| 0.207856
| 0.184288
| 0.184288
| 0.153191
| 0.120458
| 0
| 0.01407
| 0.579859
| 9,473
| 219
| 109
| 43.255708
| 0.753518
| 0
| 0
| 0.320755
| 0
| 0
| 0.146522
| 0.013512
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009434
| false
| 0.009434
| 0.028302
| 0
| 0.051887
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed75ef3dbcd90991f3b2e3a5c73442983622bbb5
| 452
|
py
|
Python
|
thinkutils_plus/eventbus/sample/myeventbus.py
|
ThinkmanWang/thinkutils_plus
|
65d56a1a0cfce22dff08a4f0baea6b4eb08a2e35
|
[
"MIT"
] | null | null | null |
thinkutils_plus/eventbus/sample/myeventbus.py
|
ThinkmanWang/thinkutils_plus
|
65d56a1a0cfce22dff08a4f0baea6b4eb08a2e35
|
[
"MIT"
] | null | null | null |
thinkutils_plus/eventbus/sample/myeventbus.py
|
ThinkmanWang/thinkutils_plus
|
65d56a1a0cfce22dff08a4f0baea6b4eb08a2e35
|
[
"MIT"
] | null | null | null |
__author__ = 'Xsank'
import time
from thinkutils_plus.eventbus.eventbus import EventBus
from myevent import GreetEvent
from myevent import ByeEvent
from mylistener import MyListener
if __name__=="__main__":
eventbus=EventBus()
eventbus.register(MyListener())
ge=GreetEvent('world')
be=ByeEvent('world')
eventbus.async_post(be)
eventbus.post(ge)
time.sleep(0.1)
eventbus.unregister(MyListener())
eventbus.destroy()
| 23.789474
| 54
| 0.743363
| 53
| 452
| 6.075472
| 0.490566
| 0.149068
| 0.10559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005236
| 0.154867
| 452
| 19
| 55
| 23.789474
| 0.837696
| 0
| 0
| 0
| 0
| 0
| 0.050773
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.3125
| 0
| 0.3125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
ed7d1c9bb5710045f4cb95dccf219d3b5c6faaa9
| 2,564
|
py
|
Python
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 7
|
2017-12-06T18:16:13.000Z
|
2021-02-09T19:25:26.000Z
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 34
|
2016-01-25T19:48:07.000Z
|
2021-02-03T22:34:09.000Z
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 10
|
2017-02-01T15:14:22.000Z
|
2021-02-16T01:34:16.000Z
|
from __future__ import print_function
import numpy as np
import os,sys,time
"""
Copied from orphics.mpi
"""
try:
    disable_mpi_env = os.environ['DISABLE_MPI']
    disable_mpi = disable_mpi_env.lower().strip() == "true"
except KeyError:
    disable_mpi = False
"""
Use the below cleanup stuff only for intel-mpi!
If you use it on openmpi, you will have no traceback for errors
causing hours of endless confusion and frustration! - Sincerely, past frustrated Mat
"""
# From Sigurd's enlib.mpi:
# Uncaught exceptions don't cause mpi to abort. This can lead to thousands of
# wasted CPU hours
# def cleanup(type, value, traceback):
# sys.__excepthook__(type, value, traceback)
# MPI.COMM_WORLD.Abort(1)
# sys.excepthook = cleanup
class fakeMpiComm:
"""
A Simple Fake MPI implementation
"""
def __init__(self):
pass
def Get_rank(self):
return 0
def Get_size(self):
return 1
def Barrier(self):
pass
def Abort(self,dummy):
pass
try:
    if disable_mpi:
        raise ImportError("MPI disabled via DISABLE_MPI")
    from mpi4py import MPI
except ImportError:
if not(disable_mpi): print("WARNING: mpi4py could not be loaded. Falling back to fake MPI. This means that if you submitted multiple processes, they will all be assigned the same rank of 0, and they are potentially doing the same thing.")
class template:
pass
MPI = template()
MPI.COMM_WORLD = fakeMpiComm()
def mpi_distribute(num_tasks,avail_cores,allow_empty=False):
# copied to mapsims.convert_noise_templates
if not(allow_empty): assert avail_cores<=num_tasks
min_each, rem = divmod(num_tasks,avail_cores)
num_each = np.array([min_each]*avail_cores) # first distribute equally
if rem>0: num_each[-rem:] += 1 # add the remainder to the last set of cores (so that rank 0 never gets extra jobs)
task_range = list(range(num_tasks)) # the full range of tasks
cumul = np.cumsum(num_each).tolist() # the end indices for each task
task_dist = [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] # a list containing the tasks for each core
assert sum(num_each)==num_tasks
assert len(num_each)==avail_cores
assert len(task_dist)==avail_cores
return num_each,task_dist
def distribute(njobs,verbose=True,**kwargs):
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
numcores = comm.Get_size()
num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs)
if rank==0: print ("At most ", max(num_each) , " tasks...")
my_tasks = each_tasks[rank]
return comm,rank,my_tasks
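# Minimal usage sketch (assumed, not part of the original module): split 10
# jobs over however many ranks the script was launched with; under the fake
# MPI fallback this degenerates to a single rank owning every task.
if __name__ == "__main__":
    comm, rank, my_tasks = distribute(10, allow_empty=True)
    print("rank", rank, "owns tasks", my_tasks)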
| 29.813953
| 242
| 0.697738
| 391
| 2,564
| 4.409207
| 0.432225
| 0.032483
| 0.020882
| 0.020882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0059
| 0.206708
| 2,564
| 85
| 243
| 30.164706
| 0.841691
| 0.207878
| 0
| 0.166667
| 0
| 0.020833
| 0.135747
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.145833
| false
| 0.083333
| 0.083333
| 0.041667
| 0.354167
| 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
71ec7e1ab519fe39c3c2b69f2a497fd39095d1ca
| 15,524
|
py
|
Python
|
tests/pytests/test_tags.py
|
wayn111/RediSearch
|
897b2de35988b84851dd8380c614a21ad8da7c0f
|
[
"BSD-3-Clause",
"Ruby",
"Apache-2.0",
"MIT"
] | null | null | null |
tests/pytests/test_tags.py
|
wayn111/RediSearch
|
897b2de35988b84851dd8380c614a21ad8da7c0f
|
[
"BSD-3-Clause",
"Ruby",
"Apache-2.0",
"MIT"
] | null | null | null |
tests/pytests/test_tags.py
|
wayn111/RediSearch
|
897b2de35988b84851dd8380c614a21ad8da7c0f
|
[
"BSD-3-Clause",
"Ruby",
"Apache-2.0",
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from includes import *
from common import *
def search(env, r, *args):
return r.execute_command('ft.search', *args)
def testTagIndex(env):
r = env
env.expect('ft.create', 'idx', 'ON', 'HASH','schema', 'title', 'text', 'tags', 'tag').ok()
N = 10
for n in range(N):
env.expect('ft.add', 'idx', 'doc%d' % n, 1.0, 'fields',
'title', 'hello world term%d' % n, 'tags', 'foo bar,xxx,tag %d' % n).ok()
for _ in r.retry_with_rdb_reload():
waitForIndex(r, 'idx')
res = env.cmd('ft.search', 'idx', 'hello world')
env.assertEqual(10, res[0])
res = env.cmd('ft.search', 'idx', 'foo bar')
env.assertEqual(0, res[0])
res = env.cmd('ft.search', 'idx', '@tags:{foo bar}')
env.assertEqual(N, res[0])
# inorder should not affect tags
res = env.cmd(
'ft.search', 'idx', '@tags:{tag 1} @tags:{foo bar}', 'slop', '0', 'inorder')
env.assertEqual(1, res[0])
for n in range(N - 1):
res = env.cmd(
'ft.search', 'idx', '@tags:{tag %d}' % n, 'nocontent')
env.assertEqual(1, res[0])
env.assertEqual('doc%d' % n, res[1])
res = env.cmd(
'ft.search', 'idx', '@tags:{tag\\ %d}' % n, 'nocontent')
env.assertEqual(1, res[0])
res = env.cmd(
'ft.search', 'idx', 'hello world @tags:{tag\\ %d|tag %d}' % (n, n + 1), 'nocontent')
env.assertEqual(2, res[0])
res = py2sorted(res[1:])
env.assertEqual('doc%d' % n, res[0])
env.assertEqual('doc%d' % (n + 1), res[1])
res = env.cmd(
'ft.search', 'idx', 'term%d @tags:{tag %d}' % (n, n), 'nocontent')
env.assertEqual(1, res[0])
env.assertEqual('doc%d' % n, res[1])
def testSeparator(env):
r = env
env.expect(
'ft.create', 'idx', 'ON', 'HASH',
'schema', 'title', 'text', 'tags', 'tag', 'separator', ':').ok()
env.expect('ft.add', 'idx', 'doc1', 1.0, 'fields',
'title', 'hello world', 'tags', 'x:hello world: fooz bar:foo,bar:BOO FAR').ok()
for _ in r.retry_with_rdb_reload():
waitForIndex(r, 'idx')
for q in ('@tags:{hello world}', '@tags:{fooz bar}', '@tags:{foo\\,bar}', '@tags:{boo\\ far}', '@tags:{x}'):
res = env.cmd('ft.search', 'idx', q)
env.assertEqual(1, res[0])
def testTagPrefix(env):
env.skipOnCluster()
r = env
env.expect(
'ft.create', 'idx', 'ON', 'HASH',
'schema', 'title', 'text', 'tags', 'tag', 'separator', ',').ok()
env.expect('ft.add', 'idx', 'doc1', 1.0, 'fields', 'title', 'hello world',
'tags', 'hello world,hello-world,hell,jell').ok()
env.expect('FT.DEBUG', 'dump_tagidx', 'idx', 'tags') \
.equal([['hell', [1]], ['hello world', [1]], ['hello-world', [1]], ['jell', [1]]])
for _ in r.retry_with_rdb_reload():
waitForIndex(r, 'idx')
for q in ('@tags:{hello world}', '@tags:{hel*}', '@tags:{hello\\-*}', '@tags:{he*}'):
res = env.cmd('ft.search', 'idx', q)
env.assertEqual(res[0], 1)
def testTagFieldCase(env):
r = env
env.expect(
'ft.create', 'idx', 'ON', 'HASH',
'schema', 'title', 'text', 'TAgs', 'tag').ok()
env.expect('ft.add', 'idx', 'doc1', 1.0, 'fields',
'title', 'hello world', 'TAgs', 'HELLO WORLD,FOO BAR').ok()
for _ in r.retry_with_rdb_reload():
waitForIndex(r, 'idx')
env.assertListEqual([0], r.execute_command(
'FT.SEARCH', 'idx', '@tags:{HELLO WORLD}'))
env.assertListEqual([1, 'doc1'], r.execute_command(
'FT.SEARCH', 'idx', '@TAgs:{HELLO WORLD}', 'NOCONTENT'))
env.assertListEqual([1, 'doc1'], r.execute_command(
'FT.SEARCH', 'idx', '@TAgs:{foo bar}', 'NOCONTENT'))
env.assertListEqual([0], r.execute_command(
'FT.SEARCH', 'idx', '@TAGS:{foo bar}', 'NOCONTENT'))
def testInvalidSyntax(env):
r = env
# invalid syntax
with env.assertResponseError():
r.execute_command(
'ft.create', 'idx', 'ON', 'HASH',
'schema', 'title', 'text', 'tags', 'tag', 'separator')
with env.assertResponseError():
r.execute_command(
'ft.create', 'idx', 'ON', 'HASH',
'schema', 'title', 'text', 'tags', 'tag', 'separator', "foo")
with env.assertResponseError():
r.execute_command(
'ft.create', 'idx', 'ON', 'HASH',
'schema', 'title', 'text', 'tags', 'tag', 'separator', "")
def testTagVals(env):
r = env
r.execute_command(
'ft.create', 'idx', 'ON', 'HASH',
'schema', 'title', 'text', 'tags', 'tag', 'othertags', 'tag')
N = 100
alltags = set()
for n in range(N):
tags = ('foo %d' % n, 'bar %d' % n, 'x')
alltags.add(tags[0])
alltags.add(tags[1])
alltags.add(tags[2])
env.assertOk(r.execute_command('ft.add', 'idx', 'doc%d' % n, 1.0, 'fields',
'tags', ','.join(tags), 'othertags', 'baz %d' % int(n // 2)))
for _ in r.retry_with_rdb_reload():
waitForIndex(r, 'idx')
res = r.execute_command('ft.tagvals', 'idx', 'tags')
env.assertEqual(N * 2 + 1, len(res))
env.assertEqual(alltags, set(res))
res = r.execute_command('ft.tagvals', 'idx', 'othertags')
    env.assertEqual(N // 2, len(res))
env.expect('ft.tagvals', 'idx').raiseError()
env.expect('ft.tagvals', 'idx', 'idx', 'idx').raiseError()
env.expect('ft.tagvals', 'fake_idx', 'tags').raiseError()
env.expect('ft.tagvals', 'idx', 'fake_tags').raiseError()
env.expect('ft.tagvals', 'idx', 'title').raiseError()
def testSearchNotExistsTagValue(env):
# this test basically make sure we are not leaking
env.expect('FT.CREATE idx ON HASH SCHEMA t TAG SORTABLE').ok()
env.expect('FT.SEARCH idx @t:{val}').equal([0])
def testIssue1305(env):
env.expect('FT.CREATE myIdx ON HASH SCHEMA title TAG').ok()
env.expect('FT.ADD myIdx doc2 1.0 FIELDS title "work"').ok()
env.expect('FT.ADD myIdx doc2 1.0 FIELDS title "hello"').error()
env.expect('FT.ADD myIdx doc3 1.0 FIELDS title "hello"').ok()
env.expect('FT.ADD myIdx doc1 1.0 FIELDS title "hello,work"').ok()
expectedRes = {'doc1' : ['inf', ['title', '"hello,work"']], 'doc3' : ['inf', ['title', '"hello"']], 'doc2' : ['inf', ['title', '"work"']]}
res = env.cmd('ft.search', 'myIdx', '~@title:{wor} ~@title:{hell}', 'WITHSCORES')[1:]
res = {res[i]:res[i + 1: i + 3] for i in range(0, len(res), 3)}
env.assertEqual(res, expectedRes)
def testTagCaseSensitive(env):
conn = getConnectionByEnv(env)
env.expect('FT.CREATE idx1 SCHEMA t TAG').ok()
env.expect('FT.CREATE idx2 SCHEMA t TAG CASESENSITIVE').ok()
env.expect('FT.CREATE idx3 SCHEMA t TAG SEPARATOR .').ok()
env.expect('FT.CREATE idx4 SCHEMA t TAG SEPARATOR . CASESENSITIVE').ok()
env.expect('FT.CREATE idx5 SCHEMA t TAG CASESENSITIVE SEPARATOR .').ok()
conn.execute_command('HSET', 'doc1', 't', 'foo,FOO')
conn.execute_command('HSET', 'doc2', 't', 'FOO')
conn.execute_command('HSET', 'doc3', 't', 'foo')
if not env.is_cluster():
conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
env.expect('FT.DEBUG', 'dump_tagidx', 'idx1', 't').equal([['foo', [1, 2, 3]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx2', 't').equal([['foo', [1, 3]], ['FOO', [1, 2]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx3', 't').equal([['foo', [2, 3]], ['foo,foo', [1]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx4', 't').equal([['foo', [3]], ['foo,FOO', [1]], ['FOO', [2]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx5', 't').equal([['foo', [3]], ['foo,FOO', [1]], ['FOO', [2]]])
env.expect('FT.SEARCH', 'idx1', '@t:{FOO}') \
.equal([3, 'doc1', ['t', 'foo,FOO'], 'doc2', ['t', 'FOO'], 'doc3', ['t', 'foo']])
env.expect('FT.SEARCH', 'idx1', '@t:{foo}') \
.equal([3, 'doc1', ['t', 'foo,FOO'], 'doc2', ['t', 'FOO'], 'doc3', ['t', 'foo']])
env.expect('FT.SEARCH', 'idx2', '@t:{FOO}') \
.equal([2, 'doc1', ['t', 'foo,FOO'], 'doc2', ['t', 'FOO']])
env.expect('FT.SEARCH', 'idx2', '@t:{foo}') \
.equal([2, 'doc1', ['t', 'foo,FOO'], 'doc3', ['t', 'foo']])
conn.execute_command('HSET', 'doc1', 't', 'f o,F O')
conn.execute_command('HSET', 'doc2', 't', 'F O')
conn.execute_command('HSET', 'doc3', 't', 'f o')
if not env.is_cluster():
forceInvokeGC(env, 'idx1')
forceInvokeGC(env, 'idx2')
forceInvokeGC(env, 'idx3')
forceInvokeGC(env, 'idx4')
forceInvokeGC(env, 'idx5')
env.expect('FT.DEBUG', 'dump_tagidx', 'idx1', 't').equal([['f o', [4, 5, 6]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx2', 't').equal([['f o', [4, 6]], ['F O', [4, 5]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx3', 't').equal([['f o', [5, 6]], ['f o,f o', [4]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx4', 't').equal([['f o', [6]], ['f o,F O', [4]], ['F O', [5]]])
env.expect('FT.DEBUG', 'dump_tagidx', 'idx5', 't').equal([['f o', [6]], ['f o,F O', [4]], ['F O', [5]]])
# not casesensitive
env.expect('FT.SEARCH', 'idx1', '@t:{F\\ O}') \
.equal([3, 'doc1', ['t', 'f o,F O'], 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])
env.expect('FT.SEARCH', 'idx1', '@t:{f\\ o}') \
.equal([3, 'doc1', ['t', 'f o,F O'], 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])
# casesensitive
env.expect('FT.SEARCH', 'idx2', '@t:{F\\ O}') \
.equal([2, 'doc1', ['t', 'f o,F O'], 'doc2', ['t', 'F O']])
env.expect('FT.SEARCH', 'idx2', '@t:{f\\ o}') \
.equal([2, 'doc1', ['t', 'f o,F O'], 'doc3', ['t', 'f o']])
# not casesensitive
env.expect('FT.SEARCH', 'idx3', '@t:{f\\ o\\,f\\ o}') \
.equal([1, 'doc1', ['t', 'f o,F O']])
env.expect('FT.SEARCH', 'idx3', '@t:{f\\ o\\,F\\ O}') \
.equal([1, 'doc1', ['t', 'f o,F O']])
env.expect('FT.SEARCH', 'idx3', '@t:{F\\ O\\,F\\ O}') \
.equal([1, 'doc1', ['t', 'f o,F O']])
env.expect('FT.SEARCH', 'idx3', '@t:{F\\ O}') \
.equal([2, 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])
env.expect('FT.SEARCH', 'idx3', '@t:{f\\ o}') \
.equal([2, 'doc2', ['t', 'F O'], 'doc3', ['t', 'f o']])
# casesensitive
env.expect('FT.SEARCH', 'idx4', '@t:{f\\ o\\,f\\ o}') \
.equal([0])
env.expect('FT.SEARCH', 'idx4', '@t:{f\\ o\\,F\\ O}') \
.equal([1, 'doc1', ['t', 'f o,F O']])
env.expect('FT.SEARCH', 'idx4', '@t:{F\\ O\\,F\\ O}') \
.equal([0])
env.expect('FT.SEARCH', 'idx4', '@t:{F\\ O}') \
.equal([1, 'doc2', ['t', 'F O']])
env.expect('FT.SEARCH', 'idx4', '@t:{f\\ o}') \
.equal([1, 'doc3', ['t', 'f o']])
def testTagGCClearEmpty(env):
env.skipOnCluster()
conn = getConnectionByEnv(env)
conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
conn.execute_command('HSET', 'doc1', 't', 'foo')
conn.execute_command('HSET', 'doc2', 't', 'bar')
conn.execute_command('HSET', 'doc3', 't', 'baz')
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [1]], ['bar', [2]], ['baz', [3]]])
env.expect('FT.SEARCH', 'idx', '@t:{foo}').equal([1, 'doc1', ['t', 'foo']])
# delete two tags
conn.execute_command('DEL', 'doc1')
conn.execute_command('DEL', 'doc2')
forceInvokeGC(env, 'idx')
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['baz', [3]]])
env.expect('FT.SEARCH', 'idx', '@t:{foo}').equal([0])
# delete last tag
conn.execute_command('DEL', 'doc3')
forceInvokeGC(env, 'idx')
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])
# check term can be used after being empty
conn.execute_command('HSET', 'doc4', 't', 'foo')
conn.execute_command('HSET', 'doc5', 't', 'foo')
env.expect('FT.SEARCH', 'idx', '@t:{foo}') \
.equal([2, 'doc4', ['t', 'foo'], 'doc5', ['t', 'foo']])
def testTagGCClearEmptyWithCursor(env):
env.skipOnCluster()
conn = getConnectionByEnv(env)
conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
conn.execute_command('HSET', 'doc1', 't', 'foo')
conn.execute_command('HSET', 'doc2', 't', 'foo')
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [1, 2]]])
res, cursor = env.cmd('FT.AGGREGATE', 'idx', '@t:{foo}', 'WITHCURSOR', 'COUNT', '1')
env.assertEqual(res, [1, []])
# delete both documents and run the GC to clean 'foo' inverted index
env.expect('DEL', 'doc1').equal(1)
env.expect('DEL', 'doc2').equal(1)
forceInvokeGC(env, 'idx')
# make sure the inverted index was cleaned
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])
# read from the cursor
res, cursor = env.cmd('FT.CURSOR', 'READ', 'idx', cursor)
env.assertEqual(res, [0])
env.assertEqual(cursor, 0)
def testTagGCClearEmptyWithCursorAndMoreData(env):
env.skipOnCluster()
conn = getConnectionByEnv(env)
conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
conn.execute_command('HSET', 'doc1', 't', 'foo')
conn.execute_command('HSET', 'doc2', 't', 'foo')
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [1, 2]]])
res, cursor = env.cmd('FT.AGGREGATE', 'idx', '@t:{foo}', 'WITHCURSOR', 'COUNT', '1')
env.assertEqual(res, [1, []])
# delete both documents and run the GC to clean 'foo' inverted index
env.expect('DEL', 'doc1').equal(1)
env.expect('DEL', 'doc2').equal(1)
forceInvokeGC(env, 'idx')
# make sure the inverted index was cleaned
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])
# add data
conn.execute_command('HSET', 'doc3', 't', 'foo')
conn.execute_command('HSET', 'doc4', 't', 'foo')
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([['foo', [3, 4]]])
# read from the cursor
res, cursor = conn.execute_command('FT.CURSOR', 'READ', 'idx', cursor)
env.assertEqual(res, [0])
env.assertEqual(cursor, 0)
# ensure later documents with same tag are read
res = conn.execute_command('FT.AGGREGATE', 'idx', '@t:{foo}')
env.assertEqual(res, [1, [], []])
@unstable
def testEmptyTagLeak(env):
env.skipOnCluster()
cycles = 1
tags = 30
conn = getConnectionByEnv(env)
conn.execute_command('FT.CONFIG', 'SET', 'FORK_GC_CLEAN_THRESHOLD', '0')
conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TAG')
pl = conn.pipeline()
for i in range(cycles):
for j in range(tags):
x = j + i * tags
pl.execute_command('HSET', 'doc{}'.format(x), 't', 'tag{}'.format(x))
pl.execute()
for j in range(tags):
pl.execute_command('DEL', 'doc{}'.format(j + i * tags))
pl.execute()
forceInvokeGC(env, 'idx')
env.expect('FT.DEBUG', 'DUMP_TAGIDX', 'idx', 't').equal([])
| 42.184783
| 142
| 0.523448
| 2,093
| 15,524
| 3.832776
| 0.090301
| 0.078534
| 0.090501
| 0.046622
| 0.753677
| 0.708053
| 0.648716
| 0.603216
| 0.571429
| 0.51446
| 0
| 0.022653
| 0.229387
| 15,524
| 367
| 143
| 42.299728
| 0.647914
| 0.03646
| 0
| 0.43662
| 0
| 0
| 0.284643
| 0.009506
| 0
| 0
| 0
| 0
| 0.116197
| 1
| 0.049296
| false
| 0
| 0.007042
| 0.003521
| 0.059859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c0252cbabe1a0b566b1ac4670f0fdedec520c7a
| 370
|
py
|
Python
|
Part1/AverageAccuracy.py
|
efkandurakli/Graduation-Project1
|
fd2cba89929da2cef49ec67214b54c310b57ce01
|
[
"MIT"
] | 1
|
2019-12-18T08:16:55.000Z
|
2019-12-18T08:16:55.000Z
|
Part1/AverageAccuracy.py
|
efkandurakli/Graduation-Project1
|
fd2cba89929da2cef49ec67214b54c310b57ce01
|
[
"MIT"
] | null | null | null |
Part1/AverageAccuracy.py
|
efkandurakli/Graduation-Project1
|
fd2cba89929da2cef49ec67214b54c310b57ce01
|
[
"MIT"
] | null | null | null |
import numpy as np
from operator import truediv
def AA_andEachClassAccuracy(confusion_matrix):
    # Per-class accuracy: diagonal (correct predictions) over row sums (true counts per class).
    list_diag = np.diag(confusion_matrix)
    list_row_sum = np.sum(confusion_matrix, axis=1)
    each_acc = np.nan_to_num(truediv(list_diag, list_row_sum))
    average_acc = np.mean(each_acc)
    return each_acc, average_acc
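# Minimal usage sketch (assumed): a 3-class confusion matrix where class 0 is
# predicted perfectly and classes 1 and 2 are partially confused.
if __name__ == "__main__":
    cm = np.array([[5, 0, 0],
                   [1, 3, 1],
                   [0, 2, 3]])
    each_acc, average_acc = AA_andEachClassAccuracy(cm)
    print(each_acc)     # [1.  0.6 0.6]
    print(average_acc)  # ~0.733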
| 37
| 63
| 0.751351
| 56
| 370
| 4.642857
| 0.517857
| 0.230769
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006515
| 0.17027
| 370
| 10
| 64
| 37
| 0.840391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.222222
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c0b22579bc28f35e8719b18a2963cb1c1518847
| 2,687
|
py
|
Python
|
cubi_tk/snappy/kickoff.py
|
LaborBerlin/cubi-tk
|
4aa5306c547c38eb41d5623ff6e4bace828f85b1
|
[
"MIT"
] | null | null | null |
cubi_tk/snappy/kickoff.py
|
LaborBerlin/cubi-tk
|
4aa5306c547c38eb41d5623ff6e4bace828f85b1
|
[
"MIT"
] | null | null | null |
cubi_tk/snappy/kickoff.py
|
LaborBerlin/cubi-tk
|
4aa5306c547c38eb41d5623ff6e4bace828f85b1
|
[
"MIT"
] | null | null | null |
"""``cubi-tk snappy kickoff``: kickoff SNAPPY pipeline."""
import argparse
import os
import subprocess
import typing
from logzero import logger
from toposort import toposort
from . import common
from cubi_tk.exceptions import ParseOutputException
def run(
args, _parser: argparse.ArgumentParser, _subparser: argparse.ArgumentParser
) -> typing.Optional[int]:
logger.info("Try to find SNAPPY pipeline directory...")
try:
path = common.find_snappy_root_dir(args.path or os.getcwd(), common.DEPENDENCIES.keys())
except common.CouldNotFindPipelineRoot:
return 1
# TODO: this assumes standard naming which is a limitation...
logger.info("Looking for pipeline directories (assuming standard naming)...")
logger.debug("Looking in %s", path)
step_set = {name for name in common.DEPENDENCIES if (path / name).exists()}
steps: typing.List[str] = []
for names in toposort({k: set(v) for k, v in common.DEPENDENCIES.items()}):
steps += [name for name in names if name in step_set]
logger.info("Will run the steps: %s", ", ".join(steps))
logger.info("Submitting with sbatch...")
jids: typing.Dict[str, str] = {}
for step in steps:
dep_jids = [jids[dep] for dep in common.DEPENDENCIES[step] if dep in jids]
cmd = ["sbatch"]
if dep_jids:
cmd += ["--dependency", "afterok:%s" % ":".join(map(str, dep_jids))]
cmd += ["pipeline_job.sh"]
logger.info("Submitting step %s: %s", step, " ".join(cmd))
if args.dry_run:
jid = "<%s>" % step
else:
stdout_raw = subprocess.check_output(cmd, cwd=str(path / step), timeout=args.timeout)
stdout = stdout_raw.decode("utf-8")
if not stdout.startswith("Submitted batch job "):
raise ParseOutputException("Did not understand sbatch output: %s" % stdout)
jid = stdout.split()[-1]
logger.info(" => JID: %s", jid)
jids[step] = jid
return None
def setup_argparse(parser: argparse.ArgumentParser) -> None:
"""Setup argument parser for ``cubi-tk snappy pull-sheet``."""
parser.add_argument("--hidden-cmd", dest="snappy_cmd", default=run, help=argparse.SUPPRESS)
parser.add_argument(
"--dry-run",
"-n",
default=False,
action="store_true",
help="Perform dry-run, do not do anything.",
)
parser.add_argument(
"--timeout", default=10, type=int, help="Number of seconds to wait for commands."
)
parser.add_argument(
"path",
nargs="?",
help="Path into SNAPPY directory (below a directory containing .snappy_pipeline).",
)
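# Illustrative sketch (the shape of common.DEPENDENCIES is assumed): toposort
# yields dependency "levels" which run() flattens into a submission order, e.g.
#   from toposort import toposort
#   list(toposort({"ngs_mapping": set(), "variant_calling": {"ngs_mapping"}}))
#   # -> [{'ngs_mapping'}, {'variant_calling'}]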
| 34.448718
| 97
| 0.633048
| 338
| 2,687
| 4.961538
| 0.39645
| 0.035778
| 0.040549
| 0.015504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002427
| 0.233346
| 2,687
| 77
| 98
| 34.896104
| 0.81165
| 0.063268
| 0
| 0.05
| 0
| 0
| 0.205108
| 0
| 0
| 0
| 0
| 0.012987
| 0
| 1
| 0.033333
| false
| 0
| 0.133333
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c100a9a9e5db785c7efc1726ba5b0b98ff396a7
| 2,469
|
py
|
Python
|
src/printReport.py
|
griimx/Summer-2016
|
08bf0a68a0e12ee81318409f68448adaf75983fe
|
[
"MIT"
] | null | null | null |
src/printReport.py
|
griimx/Summer-2016
|
08bf0a68a0e12ee81318409f68448adaf75983fe
|
[
"MIT"
] | null | null | null |
src/printReport.py
|
griimx/Summer-2016
|
08bf0a68a0e12ee81318409f68448adaf75983fe
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from connection import *
from jinja2 import Environment, FileSystemLoader
import webbrowser
def print_report(emp_id):
    env = Environment(loader=FileSystemLoader('.'))
    template = env.get_template("src/template.html")
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    # Parameterised query: the driver escapes emp_id, avoiding SQL injection.
    sql = (
        "SELECT e.*, b.*, d.`depName` "
        "FROM `employees` e, `baccounts` b, `departments` d "
        "WHERE e.`empID` = b.`empdb_empID` "
        "AND e.`depDB_depID` = d.`depID` "
        "AND e.`empID` = %s"
    )
    cursor.execute(sql, (emp_id,))
result = cursor.fetchall()
# print(result[0])
result = result[0]
print(result)
template_vars = {"empID" : result['empID'],
"firstName" : result['firstName'],
"lastName" : result['lastName'],
"address" : result['address'],
"pin" : result['pin'],
"state" : result['state'],
"adharID" : result['adharID'],
"panID" : result['panID'],
"designation" : result['designation'],
"unit" : result['unit'],
"email" : result['email'],
"mobile" : result['mobile'],
"depName" : result['depName'],
"IFSC" : result['IFSC'],
"ACNo" : result['ACNo'],
"BranchAdd" : result['BranchAdd']
}
content = template.render(template_vars)
with open('print.html', 'w') as static_file:
static_file.write(content)
webbrowser.open_new_tab('print.html')
# self.entry_text(self.entry_name, result['firstName']+" "+result['lastName'] )
# self.entry_text(self.entry_EmpID, result['empID'])
# self.entry_text(self.entry_EmpName, result['firstName']+" "+result['lastName'])
# self.entry_text(self.entry_personalno, result['empID'])
# self.entry_text(self.entry_address,result['address'] )
# self.entry_text(self.entry_pin, result['pin'])
# self.entry_text(self.entry_state, result['state'])
# self.entry_text(self.entry_adhar, result['adharID'])
# self.entry_text(self.entry_pan, result['panID'])
# self.entry_text(self.entry_designation, result['designation'])
# self.entry_text(self.entry_unit, result['unit'])
# self.entry_text(self.entry_emailid, result['email'])
# self.entry_text(self.entry_mobile, result['mobile'])
# self.entry_text(self.entry_department, result['depName'])
# self.entry_text(self.entry_ifsc, result['IFSC'])
# self.entry_text(self.enrtry_acno, result['ACNo'])
# self.entry_text(self.entry_branch, result['BranchAdd'])
| 37.409091
| 86
| 0.654111
| 298
| 2,469
| 5.255034
| 0.271812
| 0.189655
| 0.141124
| 0.184547
| 0.275862
| 0.10728
| 0.10728
| 0.065134
| 0.065134
| 0
| 0
| 0.001456
| 0.165249
| 2,469
| 65
| 87
| 37.984615
| 0.75837
| 0.430539
| 0
| 0
| 0
| 0
| 0.289322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026316
| false
| 0
| 0.105263
| 0
| 0.131579
| 0.131579
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c13630030f6d62b875010ab48a5f1a305094328
| 1,266
|
py
|
Python
|
nadmin/plugins/sortable.py
|
A425/django-xadmin-1.8
|
9ab06192311b22ec654778935ce3e3c5ffd39a00
|
[
"MIT"
] | 1
|
2015-10-10T08:04:26.000Z
|
2015-10-10T08:04:26.000Z
|
nadmin/plugins/sortable.py
|
A425/django-xadmin-1.8
|
9ab06192311b22ec654778935ce3e3c5ffd39a00
|
[
"MIT"
] | 1
|
2016-03-25T01:41:36.000Z
|
2016-03-25T01:41:36.000Z
|
nadmin/plugins/sortable.py
|
A425/django-xadmin-1.8
|
9ab06192311b22ec654778935ce3e3c5ffd39a00
|
[
"MIT"
] | null | null | null |
#coding:utf-8
from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, ListAdminView
SORTBY_VAR = '_sort_by'
class SortablePlugin(BaseAdminPlugin):
sortable_fields = ['sort']
# Media
def get_media(self, media):
if self.sortable_fields and self.request.GET.get(SORTBY_VAR):
media = media + self.vendor('nadmin.plugin.sortable.js')
return media
# Block Views
def block_top_toolbar(self, context, nodes):
if self.sortable_fields:
pass
# current_refresh = self.request.GET.get(REFRESH_VAR)
# context.update({
# 'has_refresh': bool(current_refresh),
# 'clean_refresh_url': self.admin_view.get_query_string(remove=(REFRESH_VAR,)),
# 'current_refresh': current_refresh,
# 'refresh_times': [{
# 'time': r,
# 'url': self.admin_view.get_query_string({REFRESH_VAR: r}),
# 'selected': str(r) == current_refresh,
# } for r in self.refresh_times],
# })
# nodes.append(loader.render_to_string('nadmin/blocks/refresh.html', context_instance=context))
site.register_plugin(SortablePlugin, ListAdminView)
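# Behaviour note (inferred from the code above): the nadmin.plugin.sortable.js
# bundle is only injected when the change list is requested with the sort
# parameter, e.g. /admin/app/model/?_sort_by=sort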
| 34.216216
| 107
| 0.611374
| 139
| 1,266
| 5.330935
| 0.453237
| 0.094467
| 0.037787
| 0.053981
| 0.080972
| 0.080972
| 0.080972
| 0
| 0
| 0
| 0
| 0.001093
| 0.277251
| 1,266
| 36
| 108
| 35.166667
| 0.808743
| 0.436809
| 0
| 0
| 0
| 0
| 0.053009
| 0.035817
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0.076923
| 0.153846
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
9c1453b1473bf17ef5373079c50724a0067a38a2
| 3,311
|
py
|
Python
|
rotkehlchen/tests/integration/test_blockchain.py
|
coblee/rotki
|
d675f5c2d0df5176337b7b10038524ee74923482
|
[
"BSD-3-Clause"
] | null | null | null |
rotkehlchen/tests/integration/test_blockchain.py
|
coblee/rotki
|
d675f5c2d0df5176337b7b10038524ee74923482
|
[
"BSD-3-Clause"
] | 3
|
2021-01-28T21:30:46.000Z
|
2022-03-25T19:17:00.000Z
|
rotkehlchen/tests/integration/test_blockchain.py
|
coblee/rotki
|
d675f5c2d0df5176337b7b10038524ee74923482
|
[
"BSD-3-Clause"
] | null | null | null |
import operator
import os
from unittest.mock import patch
import pytest
import requests
from rotkehlchen.chain.ethereum.manager import NodeName
from rotkehlchen.constants.assets import A_BTC
from rotkehlchen.tests.utils.blockchain import mock_etherscan_query
from rotkehlchen.typing import SupportedBlockchain
@pytest.mark.skipif(
os.name == 'nt',
reason='Not testing running with geth in windows at the moment',
)
@pytest.mark.parametrize('have_blockchain_backend', [True])
def test_eth_connection_initial_balances(
blockchain,
inquirer, # pylint: disable=unused-argument
):
"""TODO for this test. Either:
1. Not use own chain but use a normal open node for this test.
2. If we use own chain, deploy the eth-scan contract there.
But probably (1) makes more sense
"""
msg = 'Should be connected to ethereum node'
assert blockchain.ethereum.web3_mapping.get(NodeName.OWN) is not None, msg
def test_query_btc_balances(blockchain):
blockchain.query_btc_balances()
assert 'BTC' not in blockchain.totals
account = '3BZU33iFcAiyVyu2M2GhEpLNuh81GymzJ7'
blockchain.modify_btc_account(account, 'append', operator.add)
blockchain.query_btc_balances()
assert blockchain.totals[A_BTC].usd_value is not None
assert blockchain.totals[A_BTC].amount is not None
@pytest.mark.parametrize('number_of_eth_accounts', [0])
def test_add_remove_account_assure_all_balances_not_always_queried(blockchain):
"""Due to a programming mistake at addition and removal of blockchain accounts
after the first time all balances were queried every time. That slowed
everything down (https://github.com/rotki/rotki/issues/678).
This is a regression test for that behaviour
TODO: Is this still needed? Shouldn't it just be removed?
Had to add lots of mocks to make it not be a slow test
"""
addr1 = '0xe188c6BEBB81b96A65aa20dDB9e2aef62627fa4c'
addr2 = '0x78a087fCf440315b843632cFd6FDE6E5adcCc2C2'
etherscan_patch = mock_etherscan_query(
eth_map={addr1: {'ETH': 1}, addr2: {'ETH': 2}},
etherscan=blockchain.ethereum.etherscan,
original_requests_get=requests.get,
original_queries=[],
)
ethtokens_max_chunks_patch = patch(
'rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH',
new=800,
)
with etherscan_patch, ethtokens_max_chunks_patch:
blockchain.add_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr1],
)
assert addr1 in blockchain.accounts.eth
with etherscan_patch, ethtokens_max_chunks_patch, patch.object(blockchain, 'query_balances') as mock: # noqa: E501
blockchain.remove_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr1],
)
assert addr1 not in blockchain.accounts.eth
assert mock.call_count == 0, 'blockchain.query_balances() should not have been called'
with etherscan_patch, ethtokens_max_chunks_patch, patch.object(blockchain, 'query_balances') as mock: # noqa: E501
blockchain.add_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr2],
)
| 36.788889
| 119
| 0.735125
| 406
| 3,311
| 5.82266
| 0.406404
| 0.045685
| 0.030457
| 0.038917
| 0.259729
| 0.201354
| 0.201354
| 0.18401
| 0.150592
| 0.083756
| 0
| 0.039048
| 0.187859
| 3,311
| 89
| 120
| 37.202247
| 0.840089
| 0.18363
| 0
| 0.213115
| 0
| 0
| 0.174094
| 0.112538
| 0
| 0
| 0.047583
| 0.022472
| 0.114754
| 1
| 0.04918
| false
| 0
| 0.147541
| 0
| 0.196721
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c2147f6458e9854c24fb91bf25b8791fe2188ff
| 528
|
py
|
Python
|
src/supplier/templates/supplier/urls.py
|
vandana0608/Pharmacy-Managament
|
f99bdec11c24027a432858daa19247a21cecc092
|
[
"bzip2-1.0.6"
] | null | null | null |
src/supplier/templates/supplier/urls.py
|
vandana0608/Pharmacy-Managament
|
f99bdec11c24027a432858daa19247a21cecc092
|
[
"bzip2-1.0.6"
] | null | null | null |
src/supplier/templates/supplier/urls.py
|
vandana0608/Pharmacy-Managament
|
f99bdec11c24027a432858daa19247a21cecc092
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path('', views.SupplierList.as_view(), name='supplier_list'),
path('view/<int:pk>', views.SupplierView.as_view(), name='supplier_view'),
path('new', views.SupplierCreate.as_view(), name='supplier_new'),
path('edit/<int:pk>', views.SupplierUpdate.as_view(), name='supplier_edit'),
path('delete/<int:pk>', views.SupplierDelete.as_view(), name='supplier_delete'),
]
| 44
| 84
| 0.69697
| 70
| 528
| 5.085714
| 0.314286
| 0.101124
| 0.168539
| 0.303371
| 0.314607
| 0.314607
| 0.314607
| 0.314607
| 0.314607
| 0.314607
| 0
| 0
| 0.104167
| 528
| 12
| 85
| 44
| 0.752643
| 0
| 0
| 0.2
| 0
| 0
| 0.257089
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c22036370e0f940a80ab34156b825acd98d5b1a
| 205
|
py
|
Python
|
web_scraper/extract/common.py
|
rarc41/web_scraper_pro
|
f297c785617c6b1617ced8f29ad11afec31f2968
|
[
"MIT"
] | null | null | null |
web_scraper/extract/common.py
|
rarc41/web_scraper_pro
|
f297c785617c6b1617ced8f29ad11afec31f2968
|
[
"MIT"
] | null | null | null |
web_scraper/extract/common.py
|
rarc41/web_scraper_pro
|
f297c785617c6b1617ced8f29ad11afec31f2968
|
[
"MIT"
] | null | null | null |
import yaml
__config=None
def config():
global __config
if not __config:
with open('config.yaml', mode='r') as f:
__config=yaml.safe_load(f)
return __config
| 15.769231
| 48
| 0.585366
| 26
| 205
| 4.192308
| 0.653846
| 0.183486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.321951
| 205
| 13
| 49
| 15.769231
| 0.784173
| 0
| 0
| 0
| 0
| 0
| 0.058252
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0
| 0.375
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c26d711887f84da99433b770df53c3bffc460c4
| 1,067
|
py
|
Python
|
Python/Vowel-Substring/solution.py
|
arpitran/HackerRank_solutions
|
a3a77c858edd3955ea38530916db9051b1aa93f9
|
[
"MIT"
] | null | null | null |
Python/Vowel-Substring/solution.py
|
arpitran/HackerRank_solutions
|
a3a77c858edd3955ea38530916db9051b1aa93f9
|
[
"MIT"
] | null | null | null |
Python/Vowel-Substring/solution.py
|
arpitran/HackerRank_solutions
|
a3a77c858edd3955ea38530916db9051b1aa93f9
|
[
"MIT"
] | null | null | null |
#!/bin/python3
import math
import os
import random
import re
import sys
#
# Complete the 'findSubstring' function below.
#
# The function is expected to return a STRING.
# The function accepts following parameters:
# 1. STRING s
# 2. INTEGER k
#
def isVowel(x):
    # True when x is a single lowercase vowel.
    return x in ('a', 'e', 'i', 'o', 'u')
def vowelcount(x):
lowercase = x.lower()
vowel_counts = {}
for vowel in "aeiou":
count = lowercase.count(vowel)
vowel_counts[vowel] = count
counts = vowel_counts.values()
total_vowels = sum(counts)
return total_vowels
def findSubstring(s, k):
    sub_string = {}
    # Vowel count for every substring of length k, in order of appearance.
    for i in range(len(s) - k + 1):
        sub = s[i:i + k]
        sub_string[sub] = vowelcount(sub)
    if not sub_string or sub_string[max(sub_string, key=sub_string.get)] == 0:
        return "Not found!"
    return max(sub_string, key=sub_string.get)
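A quick check against the sample usually quoted with this task (the sample values are an assumption, not taken from this repo):
print(findSubstring("azerdii", 5))  # 'erdii' — the length-5 window with 3 vowels
print(findSubstring("bcdfg", 2))    # 'Not found!' — no window contains a vowel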
| 20.519231
| 118
| 0.626992
| 167
| 1,067
| 3.904192
| 0.413174
| 0.096626
| 0.046012
| 0.027607
| 0.082822
| 0.082822
| 0.082822
| 0
| 0
| 0
| 0
| 0.008717
| 0.247423
| 1,067
| 51
| 119
| 20.921569
| 0.803238
| 0.181818
| 0
| 0
| 0
| 0
| 0.023175
| 0
| 0
| 0
| 0
| 0.019608
| 0
| 1
| 0.103448
| false
| 0
| 0.172414
| 0
| 0.448276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c2c850b8212d47e83a1fb645622cfcbef2e844f
| 7,385
|
py
|
Python
|
python/tink/jwt/_raw_jwt.py
|
cuonglm/tink
|
df5fa42e45b4d43aac6c3506ceba2956b79a62b8
|
[
"Apache-2.0"
] | null | null | null |
python/tink/jwt/_raw_jwt.py
|
cuonglm/tink
|
df5fa42e45b4d43aac6c3506ceba2956b79a62b8
|
[
"Apache-2.0"
] | null | null | null |
python/tink/jwt/_raw_jwt.py
|
cuonglm/tink
|
df5fa42e45b4d43aac6c3506ceba2956b79a62b8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The raw JSON Web Token (JWT)."""
import copy
import datetime
import json
from typing import cast, Mapping, Set, List, Dict, Optional, Text, Union, Any
from tink import core
from tink.jwt import _jwt_error
from tink.jwt import _jwt_format
_REGISTERED_NAMES = frozenset({'iss', 'sub', 'jti', 'aud', 'exp', 'nbf', 'iat'})
_MAX_TIMESTAMP_VALUE = 253402300799 # 31 Dec 9999, 23:59:59 GMT
Claim = Union[None, bool, int, float, Text, List[Any], Dict[Text, Any]]
def _from_datetime(t: datetime.datetime) -> float:
if not t.tzinfo:
raise _jwt_error.JwtInvalidError('datetime must have tzinfo')
return t.timestamp()
def _to_datetime(timestamp: float) -> datetime.datetime:
return datetime.datetime.fromtimestamp(timestamp, datetime.timezone.utc)
def _validate_custom_claim_name(name: Text) -> None:
if name in _REGISTERED_NAMES:
raise _jwt_error.JwtInvalidError(
'registered name %s cannot be custom claim name' % name)
class RawJwt(object):
"""A raw JSON Web Token (JWT).
It can be signed to obtain a compact JWT. It is also used as a parse token
that has not yet been verified.
"""
def __new__(cls):
raise core.TinkError('RawJwt cannot be instantiated directly.')
def __init__(self, type_header: Optional[Text], payload: Dict[Text,
Any]) -> None:
# No need to copy payload, because only create and from_json_payload
# call this method.
if not isinstance(payload, Dict):
raise _jwt_error.JwtInvalidError('payload must be a dict')
self._type_header = type_header
self._payload = payload
self._validate_string_claim('iss')
self._validate_string_claim('sub')
self._validate_string_claim('jti')
self._validate_timestamp_claim('exp')
self._validate_timestamp_claim('nbf')
self._validate_timestamp_claim('iat')
self._validate_audience_claim()
def _validate_string_claim(self, name: Text):
if name in self._payload:
if not isinstance(self._payload[name], Text):
raise _jwt_error.JwtInvalidError('claim %s must be a String' % name)
def _validate_timestamp_claim(self, name: Text):
if name in self._payload:
timestamp = self._payload[name]
if not isinstance(timestamp, (int, float)):
raise _jwt_error.JwtInvalidError('claim %s must be a Number' % name)
if timestamp > _MAX_TIMESTAMP_VALUE or timestamp < 0:
raise _jwt_error.JwtInvalidError(
'timestamp of claim %s is out of range' % name)
def _validate_audience_claim(self):
if 'aud' in self._payload:
audiences = self._payload['aud']
if isinstance(audiences, Text):
self._payload['aud'] = [audiences]
return
if not isinstance(audiences, list) or not audiences:
raise _jwt_error.JwtInvalidError('audiences must be a non-empty list')
if not all(isinstance(value, Text) for value in audiences):
raise _jwt_error.JwtInvalidError('audiences must only contain Text')
# TODO(juerg): Consider adding a raw_ prefix to all access methods
def has_type_header(self) -> bool:
return self._type_header is not None
def type_header(self) -> Text:
if not self.has_type_header():
raise KeyError('type header is not set')
return self._type_header
def has_issuer(self) -> bool:
return 'iss' in self._payload
def issuer(self) -> Text:
return cast(Text, self._payload['iss'])
def has_subject(self) -> bool:
return 'sub' in self._payload
def subject(self) -> Text:
return cast(Text, self._payload['sub'])
def has_audiences(self) -> bool:
return 'aud' in self._payload
def audiences(self) -> List[Text]:
return list(self._payload['aud'])
def has_jwt_id(self) -> bool:
return 'jti' in self._payload
def jwt_id(self) -> Text:
return cast(Text, self._payload['jti'])
def has_expiration(self) -> bool:
return 'exp' in self._payload
def expiration(self) -> datetime.datetime:
return _to_datetime(self._payload['exp'])
def has_not_before(self) -> bool:
return 'nbf' in self._payload
def not_before(self) -> datetime.datetime:
return _to_datetime(self._payload['nbf'])
def has_issued_at(self) -> bool:
return 'iat' in self._payload
def issued_at(self) -> datetime.datetime:
return _to_datetime(self._payload['iat'])
def custom_claim_names(self) -> Set[Text]:
return {n for n in self._payload.keys() if n not in _REGISTERED_NAMES}
def custom_claim(self, name: Text) -> Claim:
_validate_custom_claim_name(name)
value = self._payload[name]
if isinstance(value, (list, dict)):
return copy.deepcopy(value)
else:
return value
def json_payload(self) -> Text:
"""Returns the payload encoded as JSON string."""
return _jwt_format.json_dumps(self._payload)
@classmethod
def create(cls,
*,
type_header: Optional[Text] = None,
issuer: Optional[Text] = None,
subject: Optional[Text] = None,
audiences: Optional[List[Text]] = None,
jwt_id: Optional[Text] = None,
expiration: Optional[datetime.datetime] = None,
not_before: Optional[datetime.datetime] = None,
issued_at: Optional[datetime.datetime] = None,
             custom_claims: Optional[Mapping[Text, Claim]] = None) -> 'RawJwt':
"""Create a new RawJwt instance."""
payload = {}
if issuer:
payload['iss'] = issuer
if subject:
payload['sub'] = subject
if jwt_id is not None:
payload['jti'] = jwt_id
if audiences is not None:
payload['aud'] = copy.copy(audiences)
if expiration:
payload['exp'] = _from_datetime(expiration)
if not_before:
payload['nbf'] = _from_datetime(not_before)
if issued_at:
payload['iat'] = _from_datetime(issued_at)
if custom_claims:
for name, value in custom_claims.items():
_validate_custom_claim_name(name)
if not isinstance(name, Text):
raise _jwt_error.JwtInvalidError('claim name must be Text')
if (value is None or isinstance(value, (bool, int, float, Text))):
payload[name] = value
elif isinstance(value, list):
payload[name] = json.loads(json.dumps(value))
elif isinstance(value, dict):
payload[name] = json.loads(json.dumps(value))
else:
raise _jwt_error.JwtInvalidError('claim %s has unknown type' % name)
raw_jwt = object.__new__(cls)
raw_jwt.__init__(type_header, payload)
return raw_jwt
@classmethod
def from_json(cls, type_header: Optional[Text], payload: Text) -> 'RawJwt':
"""Creates a RawJwt from payload encoded as JSON string."""
raw_jwt = object.__new__(cls)
raw_jwt.__init__(type_header, _jwt_format.json_loads(payload))
return raw_jwt
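A hedged usage sketch built only from what this file defines; the claim values are illustrative:
import datetime

token = RawJwt.create(
    issuer='issuer',
    audiences=['audience'],
    expiration=datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc),
    custom_claims={'role': 'reader'},
)
assert token.issuer() == 'issuer'
assert token.has_expiration() and not token.has_subject()
print(token.json_payload())  # JSON string with iss, aud, exp and the custom claim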
| 34.189815
| 80
| 0.677319
| 997
| 7,385
| 4.811434
| 0.198596
| 0.057328
| 0.02981
| 0.05837
| 0.211799
| 0.150302
| 0.143215
| 0.07734
| 0.047947
| 0.015843
| 0
| 0.005703
| 0.21652
| 7,385
| 215
| 81
| 34.348837
| 0.823367
| 0.133785
| 0
| 0.10596
| 0
| 0
| 0.075287
| 0
| 0
| 0
| 0
| 0.004651
| 0
| 1
| 0.192053
| false
| 0
| 0.046358
| 0.112583
| 0.410596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
9c2cdfaea02de247b5a0a427743330312fb34eb8
| 16,904
|
py
|
Python
|
dialogue-engine/test/programytest/config/file/test_json.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 104
|
2020-03-30T09:40:00.000Z
|
2022-03-06T22:34:25.000Z
|
dialogue-engine/test/programytest/config/file/test_json.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 25
|
2020-06-12T01:36:35.000Z
|
2022-02-19T07:30:44.000Z
|
dialogue-engine/test/programytest/config/file/test_json.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 10
|
2020-04-02T23:43:56.000Z
|
2021-05-14T13:47:01.000Z
|
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import os
from programy.config.file.json_file import JSONConfigurationFile
from programy.clients.events.console.config import ConsoleConfiguration
from programy.utils.substitutions.substitues import Substitutions
from programytest.config.file.base_file_tests import ConfigurationBaseFileTests
class JSONConfigurationFileTests(ConfigurationBaseFileTests):
def test_get_methods(self):
config_data = JSONConfigurationFile()
self.assertIsNotNone(config_data)
configuration = config_data.load_from_text("""
{
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
section = config_data.get_section("brainx")
self.assertIsNone(section)
section = config_data.get_section("brain")
self.assertIsNotNone(section)
child_section = config_data.get_section("overrides", section)
self.assertIsNotNone(child_section)
keys = list(config_data.get_child_section_keys("overrides", section))
self.assertIsNotNone(keys)
self.assertEqual(3, len(keys))
self.assertTrue("allow_system_aiml" in keys)
self.assertTrue("allow_learn_aiml" in keys)
self.assertTrue("allow_learnf_aiml" in keys)
self.assertIsNone(config_data.get_child_section_keys("missing", section))
self.assertEqual(True, config_data.get_option(child_section, "allow_system_aiml"))
self.assertEqual(True, config_data.get_option(child_section, "missing", missing_value=True))
self.assertEqual(True, config_data.get_bool_option(child_section, "allow_system_aiml"))
self.assertEqual(False, config_data.get_bool_option(child_section, "other_value"))
self.assertEqual(0, config_data.get_int_option(child_section, "other_value"))
def test_load_from_file(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_file(os.path.dirname(__file__) + os.sep + "test_json.json", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assert_configuration(configuration)
def test_load_from_text_multis_one_value(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"bot": {
"brain": "bot1"
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assertEqual(1, len(configuration.client_configuration.configurations[0].configurations))
def test_load_from_text_multis_multiple_values(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot"
},
"bot": {
"brain": ["bot1", "bot2"]
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assertEqual(2, len(configuration.client_configuration.configurations[0].configurations))
def test_load_from_text(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot",
"prompt": ">>>",
"scheduler": {
"name": "Scheduler1",
"debug_level": 50,
"add_listeners": false,
"remove_all_jobs": false
},
"storage": {
"entities": {
"users": "sql",
"linked_accounts": "sql",
"links": "sql",
"properties": "file",
"conversations": "file",
"categories": "file",
"maps": "file",
"sets": "file",
"rdf": "file",
"denormal": "file",
"normal": "file",
"gender": "file",
"person": "file",
"person2": "file",
"spelling_corpus": "file",
"license_keys": "file",
"nodes": "file",
"binaries": "file",
"braintree": "file",
"preprocessors": "file",
"postprocessors": "file",
"regex_templates": "file",
"usergroups": "file",
"learnf": "file"
},
"stores": {
"sql": {
"type": "sql",
"config": {
"url": "sqlite:///:memory",
"echo": false,
"encoding": "utf-8",
"create_db": true,
"drop_all_first": true
}
},
"mongo": {
"type": "mongo",
"config": {
"url": "mongodb://localhost:27017/",
"database": "programy",
"drop_all_first": true
}
},
"redis": {
"type": "redis",
"config": {
"host": "localhost",
"port": 6379,
"password": null,
"db": 0,
"prefix": "programy",
"drop_all_first": true
}
},
"file": {
"type": "file",
"config": {
"category_storage": {
"files": "./storage/categories"
},
"conversations_storage": {
"files": "./storage/conversations"
},
"sets_storage": {
"files": "./storage/sets",
"extension": ".txt",
"directories": false
},
"maps_storage": {
"files": "./storage/maps",
"extension": ".txt",
"directories": false
},
"regex_templates": {
"files": "./storage/regex"
},
"lookups_storage": {
"files": "./storage/lookups",
"extension": ".txt",
"directories": false
},
"properties_storage": {
"file": "./storage/properties.txt"
},
"defaults_storage": {
"file": "./storage/defaults.txt"
},
"rdf_storage": {
"files": "./storage/rdfs",
"extension": ".txt",
"directories": true
},
"spelling_corpus": {
"file": "./storage/spelling/corpus.txt"
},
"license_keys": {
"file": "./storage/license.keys"
},
"nodes": {
"files": "./storage/nodes"
},
"binaries": {
"files": "./storage/binaries"
},
"braintree": {
"file": "./storage/braintree/braintree.xml",
"format": "xml"
},
"preprocessors": {
"file": "./storage/processing/preprocessors.txt"
},
"postprocessors": {
"file": "./storage/processing/postprocessing.txt"
},
"usergroups": {
"files": "./storage/security/usergroups.txt"
},
"learnf": {
"files": "./storage/categories/learnf"
}
}
}
}
},
"logger": {
"type": "logger",
"config": {
"conversation_logger": "conversation"
}
}
},
"voice": {
"license_keys": "$BOT_ROOT/config/license.keys",
"tts": "osx",
"stt": "azhang",
"osx": {
"classname": "talky.clients.voice.tts.osxsay.OSXSayTextToSpeach"
},
"pytts": {
"classname": "talky.clients.voice.tts.pyttssay.PyTTSSayTextToSpeach",
"rate_adjust": 10
},
"azhang": {
"classname": "talky.clients.voice.stt.azhang.AnthonyZhangSpeechToText",
"ambient_adjust": 3,
"service": "ibm"
}
},
"rest": {
"host": "0.0.0.0",
"port": 8989,
"debug": false,
"workers": 4,
"license_keys": "$BOT_ROOT/config/license.keys"
},
"webchat": {
"host": "0.0.0.0",
"port": 8090,
"debug": false,
"license_keys": "$BOT_ROOT/config/license.keys",
"api": "/api/web/v1.0/ask"
},
"twitter": {
"polling": true,
"polling_interval": 49,
"streaming": false,
"use_status": true,
"use_direct_message": true,
"auto_follow": true,
"storage": "file",
"welcome_message": "Thanks for following me, send me a message and I'll try and help",
"license_keys": "file"
},
"xmpp": {
"server": "talk.google.com",
"port": 5222,
"xep_0030": true,
"xep_0004": true,
"xep_0060": true,
"xep_0199": true,
"license_keys": "file"
},
"socket": {
"host": "127.0.0.1",
"port": 9999,
"queue": 5,
"debug": true,
"license_keys": "file"
},
"telegram": {
"unknown_command": "Sorry, that is not a command I have been taught yet!",
"license_keys": "file"
},
"facebook": {
"host": "127.0.0.1",
"port": 5000,
"debug": false,
"license_keys": "file"
},
"twilio": {
"host": "127.0.0.1",
"port": 5000,
"debug": false,
"license_keys": "file"
},
"slack": {
"polling_interval": 1,
"license_keys": "file"
},
"viber": {
"name": "Servusai",
"avatar": "http://viber.com/avatar.jpg",
"license_keys": "file"
},
"line": {
"host": "127.0.0.1",
"port": 8084,
"debug": false,
"license_keys": "file"
},
"kik": {
"bot_name": "servusai",
"webhook": "https://93638f7a.ngrok.io/api/kik/v1.0/ask",
"host": "127.0.0.1",
"port": 8082,
"debug": false,
"license_keys": "file"
},
"bot": {
"brain": "brain",
"initial_question": "Hi, how can I help you today?",
"initial_question_srai": "YINITIALQUESTION",
"default_response": "Sorry, I don't have an answer for that!",
"default_response_srai": "YEMPTY",
"empty_string": "YEMPTY",
"exit_response": "So long, and thanks for the fish!",
"exit_response_srai": "YEXITRESPONSE",
"override_properties": true,
"max_question_recursion": 1000,
"max_question_timeout": 60,
"max_search_depth": 100,
"max_search_timeout": 60,
"spelling": {
"load": true,
"classname": "programy.spelling.norvig.NorvigSpellingChecker",
"check_before": true,
"check_and_retry": true
},
"conversations": {
"max_histories": 100,
"restore_last_topic": false,
"initial_topic": "TOPIC1",
"empty_on_start": false
}
},
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
},
"defaults": {
"default-get": "unknown",
"default-property": "unknown",
"default-map": "unknown",
"learnf-path": "file"
},
"binaries": {
"save_binary": true,
"load_binary": true,
"load_aiml_on_binary_fail": true
},
"braintree": {
"create": true
},
"services": {
"REST": {
"classname": "programy.services.rest.GenericRESTService",
"method": "GET",
"host": "0.0.0.0",
"port": 8080
},
"Pannous": {
"classname": "programy.services.pannous.PannousService",
"url": "http://weannie.pannous.com/api"
}
},
"security": {
"authentication": {
"classname": "programy.security.authenticate.passthrough.BasicPassThroughAuthenticationService",
"denied_srai": "AUTHENTICATION_FAILED"
},
"authorisation": {
"classname": "programy.security.authorise.usergroupsauthorisor.BasicUserGroupAuthorisationService",
"denied_srai": "AUTHORISATION_FAILED",
"usergroups": {
"storage": "file"
}
}
},
"oob": {
"default": {
"classname": "programy.oob.defaults.default.DefaultOutOfBandProcessor"
},
"alarm": {
"classname": "programy.oob.defaults.alarm.AlarmOutOfBandProcessor"
},
"camera": {
"classname": "programy.oob.defaults.camera.CameraOutOfBandProcessor"
},
"clear": {
"classname": "programy.oob.defaults.clear.ClearOutOfBandProcessor"
},
"dial": {
"classname": "programy.oob.defaults.dial.DialOutOfBandProcessor"
},
"dialog": {
"classname": "programy.oob.defaults.dialog.DialogOutOfBandProcessor"
},
"email": {
"classname": "programy.oob.defaults.email.EmailOutOfBandProcessor"
},
"geomap": {
"classname": "programy.oob.defaults.map.MapOutOfBandProcessor"
},
"schedule": {
"classname": "programy.oob.defaults.schedule.ScheduleOutOfBandProcessor"
},
"search": {
"classname": "programy.oob.defaults.search.SearchOutOfBandProcessor"
},
"sms": {
"classname": "programy.oob.defaults.sms.SMSOutOfBandProcessor"
},
"url": {
"classname": "programy.oob.defaults.url.URLOutOfBandProcessor"
},
"wifi": {
"classname": "programy.oob.defaults.wifi.WifiOutOfBandProcessor"
}
},
"dynamic": {
"variables": {
"gettime": "programy.dynamic.variables.datetime.GetTime"
},
"sets": {
"numeric": "programy.dynamic.sets.numeric.IsNumeric",
"roman": "programy.dynamic.sets.roman.IsRomanNumeral"
},
"maps": {
"romantodec": "programy.dynamic.maps.roman.MapRomanToDecimal",
"dectoroman": "programy.dynamic.maps.roman.MapDecimalToRoman"
}
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assert_configuration(configuration)
def test_load_additionals(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot"
},
"bot": {
"brain": "brain"
},
"brain": {
"security": {
"authentication": {
"classname": "programy.security.authenticate.passthrough.PassThroughAuthenticationService",
"denied_srai": "ACCESS_DENIED"
}
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
auth_service = configuration.client_configuration.configurations[0].configurations[0].security.authentication
self.assertIsNotNone(auth_service)
self.assertEqual("ACCESS_DENIED", auth_service.denied_srai)
def test_load_with_subs(self):
subs = Substitutions()
subs.add_substitute("$ALLOW_SYSTEM", True)
config_data = JSONConfigurationFile()
self.assertIsNotNone(config_data)
configuration = config_data.load_from_text("""
{
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
section = config_data.get_section("brainx")
self.assertIsNone(section)
section = config_data.get_section("brain")
self.assertIsNotNone(section)
child_section = config_data.get_section("overrides", section)
self.assertIsNotNone(child_section)
self.assertEqual(True, config_data.get_option(child_section, "allow_system_aiml"))
self.assertEqual(True, config_data.get_bool_option(child_section, "allow_system_aiml"))
self.assertEqual(False, config_data.get_bool_option(child_section, "other_value"))
| 31.245841
| 129
| 0.562885
| 1,520
| 16,904
| 6.094079
| 0.290132
| 0.02375
| 0.022455
| 0.039296
| 0.336716
| 0.316852
| 0.272698
| 0.246249
| 0.246249
| 0.244305
| 0
| 0.01316
| 0.29425
| 16,904
| 540
| 130
| 31.303704
| 0.763286
| 0.062825
| 0
| 0.262525
| 0
| 0.004008
| 0.731906
| 0.142857
| 0
| 0
| 0
| 0
| 0.08016
| 1
| 0.014028
| false
| 0.006012
| 0.01002
| 0
| 0.026052
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c3934843ce267b0dc897db0634f69b0dfaade62
| 280
|
py
|
Python
|
Data_Structures/2d_array_ds.py
|
csixteen/HackerRank
|
3ef6fa48599341f481b9e266c69df2d449a7b313
|
[
"MIT"
] | 4
|
2018-04-19T20:32:54.000Z
|
2020-04-21T12:28:00.000Z
|
Data_Structures/2d_array_ds.py
|
csixteen/HackerRank
|
3ef6fa48599341f481b9e266c69df2d449a7b313
|
[
"MIT"
] | null | null | null |
Data_Structures/2d_array_ds.py
|
csixteen/HackerRank
|
3ef6fa48599341f481b9e266c69df2d449a7b313
|
[
"MIT"
] | null | null | null |
matrix = [list(map(int, input().split())) for _ in range(6)]
max_sum = None
for i in range(4):
for j in range(4):
s = sum(matrix[i][j:j+3]) + matrix[i+1][j+1] + sum(matrix[i+2][j:j+3])
if max_sum is None or s > max_sum:
max_sum = s
print(max_sum)
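The same hourglass scan, sketched with the 6x6 grid commonly used as the sample for this exercise (an assumption, not part of this repo), where the best hourglass sums to 19:
grid = [
    [1, 1, 1, 0, 0, 0],
    [0, 1, 0, 0, 0, 0],
    [1, 1, 1, 0, 0, 0],
    [0, 0, 2, 4, 4, 0],
    [0, 0, 0, 2, 0, 0],
    [0, 0, 1, 2, 4, 0],
]
best = max(
    sum(grid[i][j:j + 3]) + grid[i + 1][j + 1] + sum(grid[i + 2][j:j + 3])
    for i in range(4) for j in range(4)
)
print(best)  # 19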
| 28
| 78
| 0.557143
| 57
| 280
| 2.631579
| 0.421053
| 0.2
| 0.106667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038278
| 0.253571
| 280
| 9
| 79
| 31.111111
| 0.679426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c464f3f464935bc0cc2e17b41fede6128938835
| 1,200
|
py
|
Python
|
async_sched/client/__init__.py
|
justengel/async_sched
|
f980722d51d15025522b2265426b0188ff368418
|
[
"MIT"
] | 1
|
2020-10-19T13:36:20.000Z
|
2020-10-19T13:36:20.000Z
|
async_sched/client/__init__.py
|
justengel/async_sched
|
f980722d51d15025522b2265426b0188ff368418
|
[
"MIT"
] | null | null | null |
async_sched/client/__init__.py
|
justengel/async_sched
|
f980722d51d15025522b2265426b0188ff368418
|
[
"MIT"
] | null | null | null |
from async_sched.client import quit_server as module_quit
from async_sched.client import request_schedules as module_request
from async_sched.client import run_command as module_run
from async_sched.client import schedule_command as module_schedule
from async_sched.client import stop_schedule as module_stop
from async_sched.client import update_server as module_update
from .client import Client, \
quit_server_async, quit_server, update_server_async, update_server, request_schedules_async, \
request_schedules, run_command_async, run_command, schedule_command_async, schedule_command, \
stop_schedule_async, stop_schedule
# The other modules in this package exist for the "-m" python flag
# `python -m async_sched.client.request_schedules --host "12.0.0.1" --port 8000`
__all__ = ['Client',
'quit_server_async', 'quit_server', 'update_server_async', 'update_server', 'request_schedules_async',
'request_schedules', 'run_command_async', 'run_command', 'schedule_command_async', 'schedule_command',
'stop_schedule_async', 'stop_schedule',
'module_quit', 'module_request', 'module_run', 'module_schedule', 'module_stop', 'module_update']
| 52.173913
| 113
| 0.785
| 164
| 1,200
| 5.341463
| 0.207317
| 0.079909
| 0.127854
| 0.136986
| 0.60274
| 0.424658
| 0.424658
| 0.424658
| 0.424658
| 0.424658
| 0
| 0.008637
| 0.131667
| 1,200
| 22
| 114
| 54.545455
| 0.832054
| 0.119167
| 0
| 0
| 0
| 0
| 0.263757
| 0.042695
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.466667
| 0
| 0.466667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
9c48342a450b3888ddd355595c9462c4c225a106
| 2,880
|
py
|
Python
|
account_processing.py
|
amitjoshi9627/Playong
|
d54a8db05ae5035e122b8bc8d84c849f25483005
|
[
"MIT"
] | 4
|
2019-04-22T15:16:45.000Z
|
2020-01-17T12:57:09.000Z
|
account_processing.py
|
amitjoshi9627/Playong
|
d54a8db05ae5035e122b8bc8d84c849f25483005
|
[
"MIT"
] | null | null | null |
account_processing.py
|
amitjoshi9627/Playong
|
d54a8db05ae5035e122b8bc8d84c849f25483005
|
[
"MIT"
] | null | null | null |
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.options import Options
import getpass
import time
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from utils import *
def login_user(browser, email='', password=''):
print('Redirecting to login page..')
browser.find_element_by_xpath('//*[@id="login-btn"]').click()
    if email == '':
email, password = take_credentials()
browser.find_element_by_id("login_username").send_keys(email)
browser.find_element_by_id("login_password").send_keys(password)
complete_captcha(browser)
time.sleep(4)
browser.find_element_by_xpath('//*[@id="static-login-btn"]').click()
def logout_user(browser):
print("\nThank you for your using the program! Logging you out from jiosaavn...")
show_notificaton("Thank", "You", 0)
action = ActionChains(browser)
menu = browser.find_element_by_class_name('user-name')
action.move_to_element(menu).perform()
menu.click()
browser.find_element_by_xpath(
'/html/body/div[2]/div/div[2]/div[3]/div[3]/ol/li[4]/a').click()
time.sleep(2)
print('Logout..successful...')
def check_credentials(browser):
print('Checking credentials...Please wait..')
time.sleep(5)
try:
close_promo_ad(browser)
accept_cookies(browser)
success = True
    except Exception:
success = False
return success
def wrong_credentials_check(browser, counts=1):
    # `success` must be initialised before the loop, then refreshed per retry.
    success = check_credentials(browser)
    while not success:
print("\nWrong username/password entered.Please try again...\n")
email = input("Enter your email for jiosaavn account: ")
password = getpass.getpass(f"Enter password for {email}: ")
email_element = browser.find_element_by_id("login_username")
email_element.clear()
email_element.send_keys(email)
pswd_element = browser.find_element_by_id("login_password")
pswd_element.clear()
pswd_element.send_keys(password)
browser.find_element_by_xpath('//*[@id="static-login-btn"]').click()
success = check_credentials(browser)
counts += 1
if counts > 4:
print('Too many unsuccessful attempts done. Exiting...\n')
break
return counts
def go_without_login(browser):
return False
def take_credentials():
email = input("Enter your email for jiosaavn account: ")
password = getpass.getpass(f"Enter password for {email}: ")
return email, password
def prompt(browser):
# response = int(input("Press 1 to Log in with you account else Press 0: "))
# if response:
# login_user(browser)
# return True
# else:
# go_without_login(browser)
print("Due to some issues.. Login Option is not available currently! Sorry for the inconvenience caused.")
go_without_login(browser)
| 32.359551
| 110
| 0.682986
| 370
| 2,880
| 5.140541
| 0.345946
| 0.05205
| 0.085174
| 0.094637
| 0.247108
| 0.233964
| 0.219769
| 0.138801
| 0.138801
| 0.138801
| 0
| 0.006061
| 0.197917
| 2,880
| 88
| 111
| 32.727273
| 0.817316
| 0.056597
| 0
| 0.092308
| 0
| 0.015385
| 0.254982
| 0.047232
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107692
| false
| 0.153846
| 0.107692
| 0.015385
| 0.276923
| 0.107692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
9c49c6272ae1b539badcabd74a81163ceda4090b
| 1,104
|
py
|
Python
|
Mundo 3/teste.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | 1
|
2021-03-10T21:53:38.000Z
|
2021-03-10T21:53:38.000Z
|
Mundo 3/teste.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | null | null | null |
Mundo 3/teste.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | null | null | null |
pessoas = {'nomes': "Rafael","sexo":"macho alfa","idade":19}
print(f"o {pessoas['nomes']} que se considera um {pessoas['sexo']} possui {pessoas['idade']}")
print(pessoas.keys())
print(pessoas.values())
print(pessoas.items())
for c in pessoas.keys():
print(c)
for c in pessoas.values():
print(c)
for c, j in pessoas.items():
print(f"o {c} pertence ao {j}")
del pessoas['sexo']
print(pessoas)
pessoas["sexo"] = "macho alfa"
print(pessoas)
print("outro codida daqui pra frente \n\n\n\n\n\n")
estado1 = {'estado': 'minas gerais', 'cidade':'capela nova' }
estado2 = {'estado':'rio de janeiro', 'cidade':"rossinha"}
brasil = []
brasil.append(estado1)
brasil.append(estado2)
print(brasil)
print(f"o brasil possui um estado chamado {brasil[0]['estado']} e a prorpia possui uma cidade chamada {brasil[0]['cidade']}")
print("-"*45)
es = {}
br = []
for c in range(0,3):
es['estado'] = str(input("informe o seu estado:"))
es['cidade'] = str(input("informe a sua cidade:"))
br.append(es.copy())
for c in br:
for i,j in c.items():
print(f"o campo {i} tem valor {j}")
| 23
| 125
| 0.638587
| 173
| 1,104
| 4.075145
| 0.393064
| 0.085106
| 0.039716
| 0.017021
| 0.008511
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012917
| 0.158514
| 1,104
| 47
| 126
| 23.489362
| 0.745963
| 0
| 0
| 0.121212
| 0
| 0.060606
| 0.416667
| 0.038043
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.424242
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
9c4cf09ffcfa4dd9bf0d914e9750a3f14e039df3
| 605
|
py
|
Python
|
examples/basic/findQSpark.py
|
myriadrf/pyLMS7002M
|
b866deea1f05dba44c9ed1a1a4666352b811b66b
|
[
"Apache-2.0"
] | 46
|
2016-11-29T05:10:36.000Z
|
2021-10-31T19:27:46.000Z
|
examples/basic/findQSpark.py
|
myriadrf/pyLMS7002M
|
b866deea1f05dba44c9ed1a1a4666352b811b66b
|
[
"Apache-2.0"
] | 2
|
2017-04-15T21:36:01.000Z
|
2017-06-08T09:44:26.000Z
|
examples/basic/findQSpark.py
|
myriadrf/pyLMS7002M
|
b866deea1f05dba44c9ed1a1a4666352b811b66b
|
[
"Apache-2.0"
] | 16
|
2016-11-28T20:47:55.000Z
|
2021-04-07T01:48:20.000Z
|
from pyLMS7002M import *
print("Searching for QSpark...")
try:
    qspark = QSpark()               # instantiate without shadowing the class name
except Exception:
    print("QSpark not found")
    exit(1)
print("\nQSpark info:")             # '\Q' was an invalid escape; '\n' was intended
qspark.printInfo()                  # print the QSpark board info
# qspark.LMS7002_Reset()            # reset the LMS7002M
lms7002 = qspark.getLMS7002()       # get the LMS7002M object
ver, rev, mask = lms7002.chipInfo # get the chip info
print("\nLMS7002M info:")
print("VER : "+str(ver))
print("REV : "+str(rev))
print("MASK : "+str(mask))
| 31.842105
| 80
| 0.528926
| 64
| 605
| 4.984375
| 0.46875
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083756
| 0.34876
| 605
| 18
| 81
| 33.611111
| 0.725888
| 0.231405
| 0
| 0
| 0
| 0
| 0.272926
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0.533333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
9c5b28789209abf7074e8e365fb1d2e93079992e
| 2,109
|
py
|
Python
|
tests/test_bindiff.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 6,132
|
2015-08-06T23:24:47.000Z
|
2022-03-31T21:49:34.000Z
|
tests/test_bindiff.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 2,272
|
2015-08-10T08:40:07.000Z
|
2022-03-31T23:46:44.000Z
|
tests/test_bindiff.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 1,155
|
2015-08-06T23:37:39.000Z
|
2022-03-31T05:54:11.000Z
|
import nose
import angr
import logging
l = logging.getLogger("angr.tests.test_bindiff")
import os
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
# todo make a better test
def test_bindiff_x86_64():
binary_path_1 = os.path.join(test_location, 'x86_64', 'bindiff_a')
binary_path_2 = os.path.join(test_location, 'x86_64', 'bindiff_b')
b = angr.Project(binary_path_1, load_options={"auto_load_libs": False})
b2 = angr.Project(binary_path_2, load_options={"auto_load_libs": False})
bindiff = b.analyses.BinDiff(b2)
identical_functions = bindiff.identical_functions
differing_functions = bindiff.differing_functions
unmatched_functions = bindiff.unmatched_functions
# check identical functions
nose.tools.assert_in((0x40064c, 0x40066a), identical_functions)
# check differing functions
nose.tools.assert_in((0x400616, 0x400616), differing_functions)
# check unmatched functions
nose.tools.assert_less_equal(len(unmatched_functions[0]), 1)
nose.tools.assert_less_equal(len(unmatched_functions[1]), 2)
# check for no major regressions
nose.tools.assert_greater(len(identical_functions), len(differing_functions))
nose.tools.assert_less(len(differing_functions), 4)
# check a function diff
fdiff = bindiff.get_function_diff(0x400616, 0x400616)
block_matches = { (a.addr, b.addr) for a, b in fdiff.block_matches }
nose.tools.assert_in((0x40064a, 0x400668), block_matches)
nose.tools.assert_in((0x400616, 0x400616), block_matches)
nose.tools.assert_in((0x40061e, 0x40061e), block_matches)
def run_all():
functions = globals()
all_functions = dict(filter((lambda kv: kv[0].startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
logging.getLogger("angr.analyses.bindiff").setLevel(logging.DEBUG)
import sys
if len(sys.argv) > 1:
globals()['test_' + sys.argv[1]]()
else:
run_all()
| 39.055556
| 106
| 0.719772
| 282
| 2,109
| 5.099291
| 0.315603
| 0.056328
| 0.09388
| 0.05911
| 0.305981
| 0.243394
| 0.109875
| 0.109875
| 0
| 0
| 0
| 0.058758
| 0.152679
| 2,109
| 53
| 107
| 39.792453
| 0.745943
| 0.07302
| 0
| 0
| 0
| 0
| 0.074435
| 0.022587
| 0
| 0
| 0.049281
| 0.018868
| 0.230769
| 1
| 0.051282
| false
| 0
| 0.128205
| 0
| 0.179487
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c5f1cf8cb3617f22a594d7ff47f26bbe868fb45
| 326
|
py
|
Python
|
01-logica-de-programacao-e-algoritmos/Aula 06/01 Tuplas/1.2 Desempacotamento de parametros em funcoes/ex01.py
|
rafaelbarretomg/Uninter
|
1f84b0103263177122663e991db3a8aeb106a959
|
[
"MIT"
] | null | null | null |
01-logica-de-programacao-e-algoritmos/Aula 06/01 Tuplas/1.2 Desempacotamento de parametros em funcoes/ex01.py
|
rafaelbarretomg/Uninter
|
1f84b0103263177122663e991db3a8aeb106a959
|
[
"MIT"
] | null | null | null |
01-logica-de-programacao-e-algoritmos/Aula 06/01 Tuplas/1.2 Desempacotamento de parametros em funcoes/ex01.py
|
rafaelbarretomg/Uninter
|
1f84b0103263177122663e991db3a8aeb106a959
|
[
"MIT"
] | null | null | null |
# Desempacotamento de parametros em funcoes
# somando valores de uma tupla
def soma(*num):
soma = 0
print('Tupla: {}' .format(num))
for i in num:
soma += i
return soma
# Programa principal
print('Resultado: {}\n' .format(soma(1, 2)))
print('Resultado: {}\n' .format(soma(1, 2, 3, 4, 5, 6, 7, 8, 9)))
| 23.285714
| 65
| 0.604294
| 50
| 326
| 3.94
| 0.64
| 0.071066
| 0.152284
| 0.213198
| 0.274112
| 0.274112
| 0.274112
| 0
| 0
| 0
| 0
| 0.047431
| 0.223926
| 326
| 13
| 66
| 25.076923
| 0.731225
| 0.273006
| 0
| 0
| 0
| 0
| 0.167382
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.25
| 0.375
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c6a899bfa0fce8fa48384ca11c89371d3bdbbc4
| 10,449
|
py
|
Python
|
tests/test_decorators.py
|
stephenfin/django-rest-framework
|
9d001cd84c1239d708b1528587c183ef30e38c31
|
[
"BSD-3-Clause"
] | 1
|
2019-01-11T13:56:41.000Z
|
2019-01-11T13:56:41.000Z
|
tests/test_decorators.py
|
stephenfin/django-rest-framework
|
9d001cd84c1239d708b1528587c183ef30e38c31
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_decorators.py
|
stephenfin/django-rest-framework
|
9d001cd84c1239d708b1528587c183ef30e38c31
|
[
"BSD-3-Clause"
] | 1
|
2019-06-29T12:46:16.000Z
|
2019-06-29T12:46:16.000Z
|
from __future__ import unicode_literals
import pytest
from django.test import TestCase
from rest_framework import status
from rest_framework.authentication import BasicAuthentication
from rest_framework.decorators import (
action, api_view, authentication_classes, detail_route, list_route,
parser_classes, permission_classes, renderer_classes, schema,
throttle_classes
)
from rest_framework.parsers import JSONParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.schemas import AutoSchema
from rest_framework.test import APIRequestFactory
from rest_framework.throttling import UserRateThrottle
from rest_framework.views import APIView
class DecoratorTestCase(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def _finalize_response(self, request, response, *args, **kwargs):
response.request = request
return APIView.finalize_response(self, request, response, *args, **kwargs)
def test_api_view_incorrect(self):
"""
If @api_view is not applied correct, we should raise an assertion.
"""
@api_view
def view(request):
return Response()
request = self.factory.get('/')
self.assertRaises(AssertionError, view, request)
def test_api_view_incorrect_arguments(self):
"""
If @api_view is missing arguments, we should raise an assertion.
"""
with self.assertRaises(AssertionError):
@api_view('GET')
def view(request):
return Response()
def test_calling_method(self):
@api_view(['GET'])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
request = self.factory.post('/')
response = view(request)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_calling_put_method(self):
@api_view(['GET', 'PUT'])
def view(request):
return Response({})
request = self.factory.put('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
request = self.factory.post('/')
response = view(request)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_calling_patch_method(self):
@api_view(['GET', 'PATCH'])
def view(request):
return Response({})
request = self.factory.patch('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
request = self.factory.post('/')
response = view(request)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_renderer_classes(self):
@api_view(['GET'])
@renderer_classes([JSONRenderer])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert isinstance(response.accepted_renderer, JSONRenderer)
def test_parser_classes(self):
@api_view(['GET'])
@parser_classes([JSONParser])
def view(request):
assert len(request.parsers) == 1
assert isinstance(request.parsers[0], JSONParser)
return Response({})
request = self.factory.get('/')
view(request)
def test_authentication_classes(self):
@api_view(['GET'])
@authentication_classes([BasicAuthentication])
def view(request):
assert len(request.authenticators) == 1
assert isinstance(request.authenticators[0], BasicAuthentication)
return Response({})
request = self.factory.get('/')
view(request)
def test_permission_classes(self):
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert response.status_code == status.HTTP_403_FORBIDDEN
def test_throttle_classes(self):
class OncePerDayUserThrottle(UserRateThrottle):
rate = '1/day'
@api_view(['GET'])
@throttle_classes([OncePerDayUserThrottle])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
response = view(request)
assert response.status_code == status.HTTP_429_TOO_MANY_REQUESTS
def test_schema(self):
"""
Checks CustomSchema class is set on view
"""
class CustomSchema(AutoSchema):
pass
@api_view(['GET'])
@schema(CustomSchema())
def view(request):
return Response({})
assert isinstance(view.cls.schema, CustomSchema)
class ActionDecoratorTestCase(TestCase):
def test_defaults(self):
@action(detail=True)
def test_action(request):
"""Description"""
assert test_action.mapping == {'get': 'test_action'}
assert test_action.detail is True
assert test_action.url_path == 'test_action'
assert test_action.url_name == 'test-action'
assert test_action.kwargs == {
'name': 'Test action',
'description': 'Description',
}
def test_detail_required(self):
with pytest.raises(AssertionError) as excinfo:
@action()
def test_action(request):
raise NotImplementedError
assert str(excinfo.value) == "@action() missing required argument: 'detail'"
def test_method_mapping_http_methods(self):
# All HTTP methods should be mappable
@action(detail=False, methods=[])
def test_action():
raise NotImplementedError
for name in APIView.http_method_names:
def method():
raise NotImplementedError
# Python 2.x compatibility - cast __name__ to str
method.__name__ = str(name)
getattr(test_action.mapping, name)(method)
# ensure the mapping returns the correct method name
for name in APIView.http_method_names:
assert test_action.mapping[name] == name
def test_view_name_kwargs(self):
"""
'name' and 'suffix' are mutually exclusive kwargs used for generating
a view's display name.
"""
# by default, generate name from method
@action(detail=True)
def test_action(request):
raise NotImplementedError
assert test_action.kwargs == {
'description': None,
'name': 'Test action',
}
# name kwarg supersedes name generation
@action(detail=True, name='test name')
def test_action(request):
raise NotImplementedError
assert test_action.kwargs == {
'description': None,
'name': 'test name',
}
# suffix kwarg supersedes name generation
@action(detail=True, suffix='Suffix')
def test_action(request):
raise NotImplementedError
assert test_action.kwargs == {
'description': None,
'suffix': 'Suffix',
}
# name + suffix is a conflict.
with pytest.raises(TypeError) as excinfo:
action(detail=True, name='test name', suffix='Suffix')
assert str(excinfo.value) == "`name` and `suffix` are mutually exclusive arguments."
def test_method_mapping(self):
@action(detail=False)
def test_action(request):
raise NotImplementedError
@test_action.mapping.post
def test_action_post(request):
raise NotImplementedError
# The secondary handler methods should not have the action attributes
for name in ['mapping', 'detail', 'url_path', 'url_name', 'kwargs']:
assert hasattr(test_action, name) and not hasattr(test_action_post, name)
def test_method_mapping_already_mapped(self):
@action(detail=True)
def test_action(request):
raise NotImplementedError
msg = "Method 'get' has already been mapped to '.test_action'."
with self.assertRaisesMessage(AssertionError, msg):
@test_action.mapping.get
def test_action_get(request):
raise NotImplementedError
def test_method_mapping_overwrite(self):
@action(detail=True)
def test_action():
raise NotImplementedError
msg = ("Method mapping does not behave like the property decorator. You "
"cannot use the same method name for each mapping declaration.")
with self.assertRaisesMessage(AssertionError, msg):
@test_action.mapping.post
def test_action():
raise NotImplementedError
def test_detail_route_deprecation(self):
with pytest.warns(DeprecationWarning) as record:
@detail_route()
def view(request):
raise NotImplementedError
assert len(record) == 1
assert str(record[0].message) == (
"`detail_route` is deprecated and will be removed in "
"3.10 in favor of `action`, which accepts a `detail` bool. Use "
"`@action(detail=True)` instead."
)
def test_list_route_deprecation(self):
with pytest.warns(DeprecationWarning) as record:
@list_route()
def view(request):
raise NotImplementedError
assert len(record) == 1
assert str(record[0].message) == (
"`list_route` is deprecated and will be removed in "
"3.10 in favor of `action`, which accepts a `detail` bool. Use "
"`@action(detail=False)` instead."
)
def test_route_url_name_from_path(self):
# pre-3.8 behavior was to base the `url_name` off of the `url_path`
with pytest.warns(DeprecationWarning):
@list_route(url_path='foo_bar')
def view(request):
raise NotImplementedError
assert view.url_path == 'foo_bar'
assert view.url_name == 'foo-bar'
| 31.954128
| 92
| 0.624175
| 1,112
| 10,449
| 5.68705
| 0.181655
| 0.036528
| 0.030993
| 0.039532
| 0.509013
| 0.438014
| 0.397849
| 0.338393
| 0.290006
| 0.242568
| 0
| 0.005996
| 0.281749
| 10,449
| 326
| 93
| 32.052147
| 0.836642
| 0.066226
| 0
| 0.491304
| 0
| 0
| 0.088562
| 0.004667
| 0
| 0
| 0
| 0
| 0.165217
| 1
| 0.217391
| false
| 0.004348
| 0.06087
| 0.03913
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c6c043e7e279ee40586854016feb8a49ecc6e3c
| 661
|
py
|
Python
|
tamilmorse/morse_encode.py
|
CRE2525/open-tamil
|
ffc02509f7b8a6a17644c85799a475a8ba623954
|
[
"MIT"
] | 1
|
2021-08-03T19:35:18.000Z
|
2021-08-03T19:35:18.000Z
|
tamilmorse/morse_encode.py
|
CRE2525/open-tamil
|
ffc02509f7b8a6a17644c85799a475a8ba623954
|
[
"MIT"
] | null | null | null |
tamilmorse/morse_encode.py
|
CRE2525/open-tamil
|
ffc02509f7b8a6a17644c85799a475a8ba623954
|
[
"MIT"
] | null | null | null |
## -*- coding: utf-8 -*-
#(C) 2018 Muthiah Annamalai
# This file is part of Open-Tamil project
# You may use or distribute this file under terms of MIT license
import codecs
import json
import tamil
import sys
import os
#e.g. python morse_encode.py கலைஞர்
CURRDIR = os.path.dirname(os.path.realpath(__file__))
def encode(text):
with codecs.open(os.path.join(CURRDIR,"data","madurai_tamilmorse.json"),"r","utf-8") as fp:
codebook = json.loads(fp.read())
output = [codebook.get(l,l) for l in tamil.utf8.get_letters(text)]
return u" ".join(output)
if __name__ == u"__main__":
encode(u" ".join([i.decode("utf-8") for i in sys.argv[1:]]))
| 30.045455
| 95
| 0.688351
| 113
| 661
| 3.911504
| 0.637168
| 0.027149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016129
| 0.155825
| 661
| 21
| 96
| 31.47619
| 0.772401
| 0.278366
| 0
| 0
| 0
| 0
| 0.102128
| 0.048936
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.384615
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
9c6edb66b25f5b7d6f691984d70d7a69bf328bdb
| 469
|
py
|
Python
|
pyRasp.py
|
ToninoTarsi/pyRasp
|
a46bb1dc38c7547b60e24189ecf34310da770042
|
[
"MIT"
] | null | null | null |
pyRasp.py
|
ToninoTarsi/pyRasp
|
a46bb1dc38c7547b60e24189ecf34310da770042
|
[
"MIT"
] | null | null | null |
pyRasp.py
|
ToninoTarsi/pyRasp
|
a46bb1dc38c7547b60e24189ecf34310da770042
|
[
"MIT"
] | null | null | null |
# pyRasp
# Copyright (c) Tonino Tarsi 2020. Licensed under MIT.
# requirement :
# Python 3
# pip install pyyaml
# pip install request
# pip install f90nml
from downloadGFSA import downloadGFSA
from prepare_wps import prepare_wps
from ungrib import ungrib
from metgrid import metgrid
from prepare_wrf import prepare_wrf
from real import real
from wrf import wrf
result = downloadGFSA(True)
prepare_wps(result)
ungrib()
metgrid()
prepare_wrf(result)
real()
wrf()
| 16.75
| 54
| 0.784648
| 66
| 469
| 5.484848
| 0.424242
| 0.082873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017722
| 0.157783
| 469
| 27
| 55
| 17.37037
| 0.898734
| 0.302772
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
9c8366ee191973d219cc50c6458365ebe9053724
| 376
|
py
|
Python
|
Backjoon/1929.py
|
hanjungwoo1/CodingTest
|
0112488d04dd53cea1c869439341fb602e699f2a
|
[
"MIT"
] | 3
|
2022-03-29T04:56:50.000Z
|
2022-03-30T08:06:42.000Z
|
Backjoon/1929.py
|
hanjungwoo1/CodingTest
|
0112488d04dd53cea1c869439341fb602e699f2a
|
[
"MIT"
] | null | null | null |
Backjoon/1929.py
|
hanjungwoo1/CodingTest
|
0112488d04dd53cea1c869439341fb602e699f2a
|
[
"MIT"
] | null | null | null |
"""
입력 예시
3 16
출력 예시
3
5
7
11
13
"""
import math
left, right = map(int, input().split())
array = [True for i in range(right+1)]
array[1] = False  # 1 is not prime
for i in range(2, int(math.sqrt(right)) + 1):
    if array[i]:
        j = 2
        while i * j <= right:
            array[i * j] = False
            j += 1
for i in range(left, right+1):
if array[i]:
print(i)
| 13.925926
| 45
| 0.505319
| 68
| 376
| 2.794118
| 0.470588
| 0.063158
| 0.094737
| 0.173684
| 0.147368
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071713
| 0.332447
| 376
| 27
| 46
| 13.925926
| 0.685259
| 0.077128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92bdca03049e78f08b91682f83e48976672f9a1b
| 456
|
py
|
Python
|
utils/get_season_things_price.py
|
vogelfenx/storagebot
|
64ab07b068bf645d7cdf5bb1cd5db91c0e2a9228
|
[
"MIT"
] | null | null | null |
utils/get_season_things_price.py
|
vogelfenx/storagebot
|
64ab07b068bf645d7cdf5bb1cd5db91c0e2a9228
|
[
"MIT"
] | 2
|
2021-11-24T18:20:00.000Z
|
2021-11-24T18:31:55.000Z
|
utils/get_season_things_price.py
|
vogelfenx/storagebot
|
64ab07b068bf645d7cdf5bb1cd5db91c0e2a9228
|
[
"MIT"
] | 4
|
2021-11-24T16:40:28.000Z
|
2021-11-28T10:40:57.000Z
|
def get_season_things_price(thing, amount, price):
if thing == 'wheel':
wheel_price = price[thing]['month'] * amount
        return f'The cost will be {wheel_price}/month'
else:
other_thing_price_week = price[thing]['week'] * amount
other_thing_price_month = price[thing]['month'] * amount
        return f'The cost will be {other_thing_price_week} RUB/week' + \
               f' or {other_thing_price_month} RUB/month'
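A hedged usage sketch; the price table below is hypothetical and only mirrors the price[thing][period] lookups the function performs:
PRICE = {
    'wheel': {'month': 200},
    'skis': {'week': 100, 'month': 300},
}
print(get_season_things_price('wheel', 4, PRICE))  # monthly rate only
print(get_season_things_price('skis', 1, PRICE))   # weekly and monthly options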
| 41.454545
| 75
| 0.662281
| 59
| 456
| 4.830508
| 0.355932
| 0.140351
| 0.210526
| 0.147368
| 0.315789
| 0.315789
| 0.315789
| 0.315789
| 0
| 0
| 0
| 0
| 0.219298
| 456
| 11
| 76
| 41.454545
| 0.800562
| 0
| 0
| 0
| 0
| 0
| 0.326039
| 0.107221
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92c32d549db39666405ca82ccd8b1e761fbef653
| 455
|
py
|
Python
|
dashboard/urls.py
|
EdisonBr/MockDados
|
c625cba7b93a8f31609549241c5aa71932e26b2d
|
[
"MIT"
] | null | null | null |
dashboard/urls.py
|
EdisonBr/MockDados
|
c625cba7b93a8f31609549241c5aa71932e26b2d
|
[
"MIT"
] | 4
|
2021-03-30T13:49:39.000Z
|
2021-06-10T19:40:02.000Z
|
dashboard/urls.py
|
smart320/MockDados
|
c625cba7b93a8f31609549241c5aa71932e26b2d
|
[
"MIT"
] | 1
|
2020-07-27T02:08:29.000Z
|
2020-07-27T02:08:29.000Z
|
from django.urls import path
from .views import dashboard_cost, dashboard_energy, MotorDataListView
app_name = 'dashboard'
urlpatterns = [
path('', MotorDataListView.as_view(), name='dashboard_custom'),
#path('', dashboard_custom, name='dashboard_custom'),
path('energy', dashboard_energy, name='dashboard_energy'),
path('cost', dashboard_cost, name='dashboard_cost'),
]
| 28.4375
| 71
| 0.745055
| 53
| 455
| 6.169811
| 0.377358
| 0.198777
| 0.116208
| 0.140673
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127473
| 455
| 15
| 72
| 30.333333
| 0.823678
| 0.114286
| 0
| 0
| 0
| 0
| 0.1625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
92c83bb936e6892d8eb39bcbfcb76fe95e1f5577
| 1,281
|
py
|
Python
|
docker/autoconfig.py
|
misc0110/bepasty-server
|
662179671220d680fed57aa90894ffebf57dd4c7
|
[
"BSD-2-Clause"
] | null | null | null |
docker/autoconfig.py
|
misc0110/bepasty-server
|
662179671220d680fed57aa90894ffebf57dd4c7
|
[
"BSD-2-Clause"
] | null | null | null |
docker/autoconfig.py
|
misc0110/bepasty-server
|
662179671220d680fed57aa90894ffebf57dd4c7
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/python
import os
import sys
SITENAME = os.environ.get("BEPASTY_SITENAME", None)
if SITENAME is None:
print("\n\nEnvironment variable BEPASTY_SITENAME must be set.")
sys.exit(1)
SECRET_KEY = os.environ.get("BEPASTY_SECRET_KEY", None)
if SECRET_KEY is None:
print("\n\nEnvironment variable BEPASTY_SECRET_KEY must be set.")
sys.exit(1)
APP_BASE_PATH = os.environ.get("BEPASTY_APP_BASE_PATH", None)
STORAGE_FILESYSTEM_DIRECTORY = os.environ.get(
"BEPASTY_STORAGE_FILESYSTEM_DIRECTORY", "/app/data",
)
DEFAULT_PERMISSIONS = os.environ.get("BEPASTY_DEFAULT_PERMISSIONS", "create,read")
PERMISSIONS = {}
admin_secret = os.environ.get("BEPASTY_ADMIN_SECRET", None)
if admin_secret is not None:
PERMISSIONS.update({admin_secret: "admin,list,create,modify,read,delete"})
try:
max_allowed_file_size = os.environ.get("BEPASTY_MAX_ALLOWED_FILE_SIZE", 5000000000)
MAX_ALLOWED_FILE_SIZE = int(max_allowed_file_size)
except ValueError as err:
print("\n\nInvalid BEPASTY_MAX_ALLOWED_FILE_SIZE: %s", str(err))
sys.exit(1)
try:
max_body_size = os.environ.get("BEPASTY_MAX_BODY_SIZE", 1040384)
MAX_BODY_SIZE = int(max_body_size)
except ValueError as err:
print("\n\nInvalid BEPASTY_MAX_BODY_SIZE: %s", str(err))
sys.exit(1)
| 30.5
| 87
| 0.753318
| 193
| 1,281
| 4.715026
| 0.295337
| 0.079121
| 0.105495
| 0.167033
| 0.358242
| 0.32967
| 0.235165
| 0.107692
| 0.107692
| 0.107692
| 0
| 0.018767
| 0.126464
| 1,281
| 41
| 88
| 31.243902
| 0.794459
| 0.01249
| 0
| 0.258065
| 0
| 0
| 0.344937
| 0.175633
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.064516
| 0
| 0.064516
| 0.129032
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92cf711f3ee4d4acd21a60efb873e479a2b9db00
| 447
|
py
|
Python
|
sparkdq/outliers/params/KSigmaParams.py
|
PasaLab/SparkDQ
|
16d50210747ef7de03cf36d689ce26ff7445f63a
|
[
"Apache-2.0"
] | 1
|
2021-02-08T07:49:54.000Z
|
2021-02-08T07:49:54.000Z
|
sparkdq/outliers/params/KSigmaParams.py
|
PasaLab/SparkDQ
|
16d50210747ef7de03cf36d689ce26ff7445f63a
|
[
"Apache-2.0"
] | null | null | null |
sparkdq/outliers/params/KSigmaParams.py
|
PasaLab/SparkDQ
|
16d50210747ef7de03cf36d689ce26ff7445f63a
|
[
"Apache-2.0"
] | null | null | null |
import json
from sparkdq.outliers.params.OutlierSolverParams import OutlierSolverParams
from sparkdq.outliers.OutlierSolver import OutlierSolver
class KSigmaParams(OutlierSolverParams):
def __init__(self, deviation=1.5):
self.deviation = deviation
def model(self):
return OutlierSolver.kSigma
@staticmethod
def from_json(json_str):
d = json.loads(json_str)
return KSigmaParams(d["deviation"])
| 23.526316
| 75
| 0.736018
| 48
| 447
| 6.708333
| 0.479167
| 0.068323
| 0.118012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00551
| 0.187919
| 447
| 18
| 76
| 24.833333
| 0.881543
| 0
| 0
| 0
| 0
| 0
| 0.020134
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.083333
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92d2be755f1c0894c43d329732b414de4bf31ab2
| 195
|
py
|
Python
|
atcoder/abc132A_fifty_fifty.py
|
uninhm/kyopro
|
bf6ed9cbf6a5e46cde0291f7aa9d91a8ddf1f5a3
|
[
"BSD-3-Clause"
] | 31
|
2020-05-13T01:07:55.000Z
|
2021-07-13T07:53:26.000Z
|
atcoder/abc132A_fifty_fifty.py
|
uninhm/kyopro
|
bf6ed9cbf6a5e46cde0291f7aa9d91a8ddf1f5a3
|
[
"BSD-3-Clause"
] | 10
|
2020-05-20T07:22:09.000Z
|
2021-07-19T03:52:13.000Z
|
atcoder/abc132A_fifty_fifty.py
|
uninhm/kyopro
|
bf6ed9cbf6a5e46cde0291f7aa9d91a8ddf1f5a3
|
[
"BSD-3-Clause"
] | 14
|
2020-05-11T05:58:36.000Z
|
2021-12-07T03:20:43.000Z
|
# Vicfred
# https://atcoder.jp/contests/abc132/tasks/abc132_a
# implementation
S = list(input())
if len(set(S)) == 2:
if S.count(S[0]) == 2:
print("Yes")
quit()
print("No")
| 16.25
| 51
| 0.574359
| 29
| 195
| 3.827586
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.215385
| 195
| 11
| 52
| 17.727273
| 0.666667
| 0.369231
| 0
| 0
| 0
| 0
| 0.042373
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92d5a318d2e721b05edd8c4dc433e4875c24b448
| 6,318
|
py
|
Python
|
visual_perception/Detection/yolov4/__init__.py
|
SSusantAchary/Visual-Perception
|
b81ffe69ab85e9afb7ee6eece43ac83c8f292285
|
[
"MIT"
] | null | null | null |
visual_perception/Detection/yolov4/__init__.py
|
SSusantAchary/Visual-Perception
|
b81ffe69ab85e9afb7ee6eece43ac83c8f292285
|
[
"MIT"
] | null | null | null |
visual_perception/Detection/yolov4/__init__.py
|
SSusantAchary/Visual-Perception
|
b81ffe69ab85e9afb7ee6eece43ac83c8f292285
|
[
"MIT"
] | null | null | null |
"""
MIT License
Copyright (c) 2020 Susant Achary <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from visual_perception.Detection.yolov4.tf import YOLOv4 as yolo_main
import numpy as np
import cv2
labels = {0: 'person', 1: 'bicycle', 2: 'car', 3: 'motorcycle', 4: 'airplane', 5: 'bus', 6: 'train', 7: 'truck', 8: 'boat',
9: 'traffic light', 10: 'fire hydrant', 11: 'stop sign', 12: 'parking meter', 13: 'bench', 14: 'bird', 15: 'cat', 16: 'dog',
17: 'horse', 18: 'sheep', 19: 'cow', 20: 'elephant', 21: 'bear', 22: 'zebra', 23: 'giraffe', 24: 'backpack', 25: 'umbrella',
26: 'handbag', 27: 'tie', 28: 'suitcase', 29: 'frisbee', 30: 'skis', 31: 'snowboard', 32: 'sports ball', 33: 'kite',
34: 'baseball bat', 35: 'baseball glove', 36: 'skateboard', 37: 'surfboard', 38: 'tennis racket', 39: 'bottle', 40: 'wine glass',
41: 'cup', 42: 'fork', 43: 'knife', 44: 'spoon', 45: 'bowl', 46: 'banana', 47: 'apple', 48: 'sandwich', 49: 'orange',
50: 'broccoli', 51: 'carrot', 52: 'hot dog', 53: 'pizza', 54: 'donut', 55: 'cake', 56: 'chair', 57: 'couch', 58: 'potted plant',
59: 'bed', 60: 'dining table', 61: 'toilet', 62: 'tv', 63: 'laptop', 64: 'mouse', 65: 'remote', 66: 'keyboard', 67: 'cell phone',
68: 'microwave', 69: 'oven', 70: 'toaster', 71: 'sink', 72: 'refrigerator', 73: 'book', 74: 'clock', 75: 'vase', 76: 'scissors',
77: 'teddy bear', 78: 'hair drier', 79: 'toothbrush'}
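# COCO class-id -> name mapping (80 classes); used below to filter predictions by name via custom_objects.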
class YOLOv4:
def __init__(self):
self.weights_path = ""
self.model = None
self.yolo_classes = ""
self.iou = 0
self.score = 0
self.input_shape = 0
self.output_path = ""
def load_model(self, weights_path:str = None, classes_path:str = None, input_shape:int = 608):
if (weights_path is None) or (classes_path is None):
raise RuntimeError ('weights_path AND classes_path should not be None.')
self.yolo_classes = classes_path
self.weights_path = weights_path
self.input_shape = input_shape
self.model = yolo_main(shape = self.input_shape)
self.model.classes = self.yolo_classes
self.model.make_model()
self.model.load_weights(self.weights_path, weights_type = 'yolo')
def predict(self, img:np.ndarray, output_path:str, iou = 0.45, score = 0.25, custom_objects:dict = None,
debug=True):
self.output_path = output_path
self.iou = iou
self.score = score
#img = np.array(Image.open(img))[..., ::-1]
pred_bboxes = self.model.predict(img, iou_threshold = self.iou, score_threshold = self.score)
boxes = []
        if custom_objects is not None:
for i in range(len(pred_bboxes)):
check_name = labels[pred_bboxes[i][4]]
check = custom_objects.get(check_name, 'invalid')
if check == 'invalid':
continue
elif check == 'valid':
boxes.append(list(pred_bboxes[i]))
boxes = np.array(boxes)
res = self.model.draw_bboxes(img, boxes)
if debug:
cv2.imwrite(self.output_path, res)
else:
res = self.model.draw_bboxes(img, pred_bboxes)
if debug:
cv2.imwrite(self.output_path, res)
return res
class TinyYOLOv4:
def __init__(self):
self.weights_path = ""
self.model = None
self.yolo_classes = ""
self.iou = 0
self.score = 0
self.input_shape = 0
self.output_path = ""
def load_model(self, weights_path:str = None, classes_path:str = None, input_shape:int = 0):
if (weights_path is None) or (classes_path is None):
raise RuntimeError ('weights_path AND classes_path should not be None.')
self.yolo_classes = classes_path
self.weights_path = weights_path
self.input_shape = input_shape
self.model = yolo_main(tiny = True, shape = self.input_shape)
self.model.classes = self.yolo_classes
self.model.make_model()
self.model.load_weights(self.weights_path, weights_type = 'yolo')
def predict(self, img:np.ndarray, output_path:str, iou = 0.4, score = 0.07, custom_objects:dict = None,
debug=True):
self.output_path = output_path
self.iou = iou
self.score = score
#img = np.array(Image.open(img))[..., ::-1]
pred_bboxes = self.model.predict(img, iou_threshold = self.iou, score_threshold = self.score)
boxes = []
        if custom_objects is not None:
for i in range(len(pred_bboxes)):
check_name = labels[pred_bboxes[i][4]]
check = custom_objects.get(check_name, 'invalid')
if check == 'invalid':
continue
elif check == 'valid':
boxes.append(list(pred_bboxes[i]))
boxes = np.array(boxes)
res = self.model.draw_bboxes(img, boxes)
if debug:
cv2.imwrite(self.output_path, res)
else:
res = self.model.draw_bboxes(img, pred_bboxes)
if debug:
cv2.imwrite(self.output_path, res)
return res
| 43.875
| 141
| 0.608895
| 843
| 6,318
| 4.447212
| 0.40688
| 0.03841
| 0.032009
| 0.020272
| 0.546279
| 0.546279
| 0.546279
| 0.546279
| 0.546279
| 0.546279
| 0
| 0.040675
| 0.268439
| 6,318
| 144
| 142
| 43.875
| 0.770446
| 0.186451
| 0
| 0.787879
| 0
| 0
| 0.133502
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.030303
| 0
| 0.131313
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92e1c91fec4c34f39e9e2622024fad4489b61749
| 5,279
|
py
|
Python
|
scripts/C189/C189Checkin.py
|
xiaopowanyi/py_scripts
|
29f240800eefd6e0f91fd098c35ac3c451172ff8
|
[
"MIT"
] | 2
|
2020-11-14T05:42:49.000Z
|
2020-11-14T05:43:13.000Z
|
scripts/C189/C189Checkin.py
|
J220541674/py_scripts
|
2b72e23041392a2e5f0a7305d7e9802054978384
|
[
"MIT"
] | null | null | null |
scripts/C189/C189Checkin.py
|
J220541674/py_scripts
|
2b72e23041392a2e5f0a7305d7e9802054978384
|
[
"MIT"
] | null | null | null |
import requests, time, re, rsa, json, base64, hashlib  # hashlib is used by calculate_md5_sign
from urllib import parse
s = requests.Session()
username = ""
password = ""
if(username == "" or password == ""):
username = input("账号:")
password = input("密码:")
def main():
login(username, password)
rand = str(round(time.time()*1000))
surl = f'https://api.cloud.189.cn/mkt/userSign.action?rand={rand}&clientType=TELEANDROID&version=8.6.3&model=SM-G930K'
url = f'https://m.cloud.189.cn/v2/drawPrizeMarketDetails.action?taskId=TASK_SIGNIN&activityId=ACT_SIGNIN'
url2 = f'https://m.cloud.189.cn/v2/drawPrizeMarketDetails.action?taskId=TASK_SIGNIN_PHOTOS&activityId=ACT_SIGNIN'
headers = {
'User-Agent':'Mozilla/5.0 (Linux; Android 5.1.1; SM-G930K Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/74.0.3729.136 Mobile Safari/537.36 Ecloud/8.6.3 Android/22 clientId/355325117317828 clientModel/SM-G930K imsi/460071114317824 clientChannelId/qq proVersion/1.0.6',
"Referer" : "https://m.cloud.189.cn/zhuanti/2016/sign/index.jsp?albumBackupOpened=1",
"Host" : "m.cloud.189.cn",
"Accept-Encoding" : "gzip, deflate",
}
response = s.get(surl,headers=headers)
netdiskBonus = response.json()['netdiskBonus']
if(response.json()['isSign'] == "false"):
print(f"未签到,签到获得{netdiskBonus}M空间")
else:
print(f"已经签到过了,签到获得{netdiskBonus}M空间")
headers = {
'User-Agent':'Mozilla/5.0 (Linux; Android 5.1.1; SM-G930K Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/74.0.3729.136 Mobile Safari/537.36 Ecloud/8.6.3 Android/22 clientId/355325117317828 clientModel/SM-G930K imsi/460071114317824 clientChannelId/qq proVersion/1.0.6',
"Referer" : "https://m.cloud.189.cn/zhuanti/2016/sign/index.jsp?albumBackupOpened=1",
"Host" : "m.cloud.189.cn",
"Accept-Encoding" : "gzip, deflate",
}
response = s.get(url,headers=headers)
try:
if ("errorCode" in response.text):
print(response.json()['errorCode'])
        elif 'description' in response.json():
            description = response.json()['description']
            print(f"Lottery draw won {description}")
except:
print(f"抽奖1完成,解析时失败")
try:
response2 = s.get(url2,headers=headers)
if ("errorCode" in response2.text):
            print(response2.json()['errorCode'])
        elif 'description' in response2.json():
            description = response2.json()['description']
            print(f"Lottery draw 2 won {description}")
except:
print(f"抽奖2完成,解析时失败")
BI_RM = list("0123456789abcdefghijklmnopqrstuvwxyz")
def int2char(a):
return BI_RM[a]
b64map = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
def b64tohex(a):
d = ""
e = 0
c = 0
for i in range(len(a)):
if list(a)[i] != "=":
v = b64map.index(list(a)[i])
if 0 == e:
e = 1
d += int2char(v >> 2)
c = 3 & v
elif 1 == e:
e = 2
d += int2char(c << 2 | v >> 4)
c = 15 & v
elif 2 == e:
e = 3
d += int2char(c)
d += int2char(v >> 2)
c = 3 & v
else:
e = 0
d += int2char(c << 2 | v >> 4)
d += int2char(15 & v)
if e == 1:
d += int2char(c << 2)
return d
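# RSA-encrypt a value with the page-supplied public key and return the ciphertext as hex.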
def rsa_encode(j_rsakey, string):
rsa_key = f"-----BEGIN PUBLIC KEY-----\n{j_rsakey}\n-----END PUBLIC KEY-----"
pubkey = rsa.PublicKey.load_pkcs1_openssl_pem(rsa_key.encode())
result = b64tohex((base64.b64encode(rsa.encrypt(f'{string}'.encode(), pubkey))).decode())
return result
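# MD5 over the '&'-sorted parameter string; defined for request signing but never called in this script.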
def calculate_md5_sign(params):
return hashlib.md5('&'.join(sorted(params.split('&'))).encode('utf-8')).hexdigest()
def login(username, password):
url = "https://cloud.189.cn/udb/udb_login.jsp?pageId=1&redirectURL=/main.action"
r = s.get(url)
captchaToken = re.findall(r"captchaToken' value='(.+?)'", r.text)[0]
lt = re.findall(r'lt = "(.+?)"', r.text)[0]
returnUrl = re.findall(r"returnUrl = '(.+?)'", r.text)[0]
paramId = re.findall(r'paramId = "(.+?)"', r.text)[0]
j_rsakey = re.findall(r'j_rsaKey" value="(\S+)"', r.text, re.M)[0]
s.headers.update({"lt": lt})
username = rsa_encode(j_rsakey, username)
password = rsa_encode(j_rsakey, password)
url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0',
'Referer': 'https://open.e.189.cn/',
}
data = {
"appKey": "cloud",
"accountType": '01',
"userName": f"{{RSA}}{username}",
"password": f"{{RSA}}{password}",
"validateCode": "",
"captchaToken": captchaToken,
"returnUrl": returnUrl,
"mailSuffix": "@189.cn",
"paramId": paramId
}
r = s.post(url, data=data, headers=headers, timeout=5)
if(r.json()['result'] == 0):
print(r.json()['msg'])
else:
print(r.json()['msg'])
redirect_url = r.json()['toUrl']
r = s.get(redirect_url)
return s
if __name__ == "__main__":
main()
| 37.707143
| 305
| 0.586664
| 683
| 5,279
| 4.481698
| 0.311859
| 0.017968
| 0.026135
| 0.021562
| 0.352499
| 0.32179
| 0.282914
| 0.273767
| 0.273767
| 0.273767
| 0
| 0.07761
| 0.23603
| 5,279
| 139
| 306
| 37.978417
| 0.681379
| 0
| 0
| 0.24
| 0
| 0.072
| 0.385111
| 0.043758
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048
| false
| 0.056
| 0.016
| 0.016
| 0.104
| 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
92e5ae34177c1ed1dca21481a52e063cdd40f997
| 5,794
|
py
|
Python
|
distancematrix/tests/consumer/test_distance_matrix.py
|
IDLabResearch/seriesdistancematrix
|
c0e666d036f24184511e766cee9fdfa55f41df97
|
[
"MIT"
] | 12
|
2019-11-22T14:34:51.000Z
|
2021-05-04T19:23:55.000Z
|
distancematrix/tests/consumer/test_distance_matrix.py
|
predict-idlab/seriesdistancematrix
|
c0e666d036f24184511e766cee9fdfa55f41df97
|
[
"MIT"
] | 1
|
2020-04-28T07:59:03.000Z
|
2020-04-28T07:59:03.000Z
|
distancematrix/tests/consumer/test_distance_matrix.py
|
IDLabResearch/seriesdistancematrix
|
c0e666d036f24184511e766cee9fdfa55f41df97
|
[
"MIT"
] | 3
|
2020-03-02T12:39:00.000Z
|
2021-03-22T13:36:25.000Z
|
import numpy as np
from unittest import TestCase
import numpy.testing as npt
from distancematrix.util import diag_indices_of
from distancematrix.consumer.distance_matrix import DistanceMatrix
class TestContextualMatrixProfile(TestCase):
def setUp(self):
self.dist_matrix = np.array([
[8.67, 1.10, 1.77, 1.26, 1.91, 4.29, 6.32, 4.24, 4.64, 5.06, 6.41, 4.07, 4.67, 9.32, 5.09],
[4.33, 4.99, 0.14, 2.79, 2.10, 6.26, 9.40, 4.14, 5.53, 4.26, 8.21, 5.91, 6.83, 9.26, 6.19],
[0.16, 9.05, 1.35, 4.78, 7.01, 4.36, 5.24, 8.81, 7.90, 5.84, 8.90, 7.88, 3.37, 4.70, 6.94],
[0.94, 8.70, 3.87, 6.29, 0.32, 1.79, 5.80, 2.61, 1.43, 6.32, 1.62, 0.20, 2.28, 7.11, 2.15],
[9.90, 4.51, 2.11, 2.83, 5.52, 8.55, 6.90, 0.24, 1.58, 4.26, 8.75, 3.71, 9.93, 8.33, 0.38],
[7.30, 5.84, 9.63, 1.95, 3.76, 3.61, 9.42, 5.56, 5.09, 7.07, 1.90, 4.78, 1.06, 0.69, 3.67],
[2.17, 8.37, 3.99, 4.28, 4.37, 2.86, 8.61, 3.39, 8.37, 6.95, 6.57, 1.79, 7.40, 4.41, 7.64],
[6.26, 0.29, 6.44, 8.84, 1.24, 2.52, 6.25, 3.07, 5.55, 3.19, 8.16, 5.32, 9.01, 0.39, 9.],
[4.67, 8.88, 3.05, 3.06, 2.36, 8.34, 4.91, 5.46, 9.25, 9.78, 0.03, 5.64, 5.10, 3.58, 6.92],
[1.01, 0.91, 6.28, 7.79, 0.68, 5.50, 6.72, 5.11, 0.80, 9.30, 9.77, 4.71, 3.26, 7.29, 6.26]])
def mock_initialise(self, dm):
dm.initialise(1, self.dist_matrix.shape[0], self.dist_matrix.shape[1])
def test_process_diagonal(self):
dm = DistanceMatrix()
self.mock_initialise(dm)
for diag in range(-self.dist_matrix.shape[0] + 1, self.dist_matrix.shape[1]):
diag_ind = diag_indices_of(self.dist_matrix, diag)
dm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_equal(dm.distance_matrix, self.dist_matrix)
def test_process_diagonal_partial_calculation(self):
dm = DistanceMatrix()
self.mock_initialise(dm)
correct = np.full_like(self.dist_matrix, np.nan, dtype=float)
for diag in range(-8, self.dist_matrix.shape[1], 3):
diag_ind = diag_indices_of(self.dist_matrix, diag)
dm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
correct[diag_ind] = self.dist_matrix[diag_ind]
npt.assert_equal(dm.distance_matrix, correct)
def test_process_column(self):
dm = DistanceMatrix()
self.mock_initialise(dm)
for column in range(0, self.dist_matrix.shape[1]):
dm.process_column(column, np.atleast_2d(self.dist_matrix[:, column]))
npt.assert_equal(dm.distance_matrix, self.dist_matrix)
def test_process_column_partial_calculation(self):
dm = DistanceMatrix()
self.mock_initialise(dm)
correct = np.full_like(self.dist_matrix, np.nan, dtype=float)
for column in [2, 3, 4, 5, 10, 11, 12]:
dm.process_column(column, np.atleast_2d(self.dist_matrix[:, column]))
correct[:, column] = self.dist_matrix[:, column]
npt.assert_equal(dm.distance_matrix, correct)
def test_streaming_process_column(self):
dm = DistanceMatrix()
dm.initialise(1, 5, 5)
dm.process_column(0, np.atleast_2d(self.dist_matrix[0, 0]))
dm.process_column(1, np.atleast_2d(self.dist_matrix[:2, 1]))
expected = np.full((5, 5), np.nan)
expected[0, 0] = self.dist_matrix[0, 0]
expected[:2, 1] = self.dist_matrix[:2, 1]
npt.assert_equal(dm.distance_matrix, expected)
for column in range(0, 5):
dm.process_column(column, np.atleast_2d(self.dist_matrix[:5, :5][:, column]))
npt.assert_equal(dm.distance_matrix, self.dist_matrix[:5, :5])
dm.shift_query(1)
dm.shift_series(3)
correct = np.full((5, 5), np.nan)
correct[0:4, 0:2] = self.dist_matrix[1:5, 3:5]
npt.assert_equal(dm.distance_matrix, correct)
for column in range(0, 5):
dm.process_column(column, np.atleast_2d(self.dist_matrix[1:6, 3:8][:, column]))
npt.assert_equal(dm.distance_matrix, self.dist_matrix[1:6, 3:8])
dm.shift_query(2)
dm.shift_series(1)
dm.process_column(4, np.atleast_2d(self.dist_matrix[3:8, 8]))
correct = np.full((5, 5), np.nan)
correct[0:3, 0:4] = self.dist_matrix[3:6, 4:8]
correct[:, 4] = self.dist_matrix[3:8, 8]
npt.assert_equal(dm.distance_matrix, correct)
def test_streaming_process_diagonal(self):
dm = DistanceMatrix()
dm.initialise(1, 5, 5)
dm.process_diagonal(0, np.atleast_2d(self.dist_matrix[0, 0]))
diag_ind = diag_indices_of(self.dist_matrix[:3, :3], 1)
dm.process_diagonal(1, np.atleast_2d(np.atleast_2d(self.dist_matrix[diag_ind])))
expected = np.full((5, 5), np.nan)
expected[0, 0] = self.dist_matrix[0, 0]
expected[0, 1] = self.dist_matrix[0, 1]
expected[1, 2] = self.dist_matrix[1, 2]
npt.assert_equal(dm.distance_matrix, expected)
for diag in range(-4,5):
diag_ind = diag_indices_of(self.dist_matrix[:5, :5], diag)
dm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_equal(dm.distance_matrix, self.dist_matrix[:5, :5])
dm.shift_query(2)
dm.shift_series(1)
expected = self.dist_matrix[2:7, 1:6].copy()
expected[-2:, :] = np.nan
expected[:, -1:] = np.nan
npt.assert_equal(dm.distance_matrix, expected)
for diag in range(-4,5):
diag_ind = diag_indices_of(self.dist_matrix[:5, :5], diag)
dm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_equal(dm.distance_matrix, self.dist_matrix[:5, :5])
| 42.602941
| 104
| 0.608733
| 1,002
| 5,794
| 3.367265
| 0.136727
| 0.104327
| 0.182573
| 0.057795
| 0.705691
| 0.64019
| 0.606995
| 0.593657
| 0.524896
| 0.487552
| 0
| 0.132737
| 0.227649
| 5,794
| 135
| 105
| 42.918519
| 0.621229
| 0
| 0
| 0.490196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127451
| 1
| 0.078431
| false
| 0
| 0.04902
| 0
| 0.137255
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92ec1a79aa56994e71f763b1fea1ca3f88478806
| 1,278
|
py
|
Python
|
pix2pix/Discriminator.py
|
yubin1219/GAN
|
8345095f9816e548c968492efbe92b427b0e06a3
|
[
"MIT"
] | null | null | null |
pix2pix/Discriminator.py
|
yubin1219/GAN
|
8345095f9816e548c968492efbe92b427b0e06a3
|
[
"MIT"
] | null | null | null |
pix2pix/Discriminator.py
|
yubin1219/GAN
|
8345095f9816e548c968492efbe92b427b0e06a3
|
[
"MIT"
] | 1
|
2021-09-17T01:28:50.000Z
|
2021-09-17T01:28:50.000Z
|
import torch
import torch.nn as nn
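# pix2pix discriminator: five stride-2 conv blocks downsampling to a 1-channel patch decision map (PatchGAN-like).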
class Discriminator(nn.Module):
def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d, use_sigmoid=False) :
super(Discriminator, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(input_nc, ndf, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(0.2, True)
)
self.conv2 = nn.Sequential(
nn.Conv2d(ndf, ndf * 2, kernel_size=4, stride=2, padding=1),
norm_layer(ndf * 2),
nn.LeakyReLU(0.2, True)
)
self.conv3 = nn.Sequential(
nn.Conv2d(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1),
norm_layer(ndf * 4),
nn.LeakyReLU(0.2, True)
)
self.conv4 = nn.Sequential(
nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
norm_layer(ndf * 8),
nn.LeakyReLU(0.2, True)
)
if use_sigmoid:
self.conv5 = nn.Sequential(
nn.Conv2d(ndf * 8, 1, kernel_size=4, stride=2, padding=1),
nn.Sigmoid()
)
else:
self.conv5 = nn.Sequential(
nn.Conv2d(ndf * 8, 1, kernel_size=4, stride=2, padding=1)
)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = self.conv3(x)
x = self.conv4(x)
x = self.conv5(x)
return x
| 29.045455
| 86
| 0.58216
| 191
| 1,278
| 3.780105
| 0.230366
| 0.099723
| 0.116343
| 0.166205
| 0.569252
| 0.450139
| 0.365651
| 0.365651
| 0.3241
| 0.3241
| 0
| 0.062433
| 0.273083
| 1,278
| 43
| 87
| 29.72093
| 0.714747
| 0
| 0
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.05
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92f1aa0fa9769c9eeef09b7c084da22948285538
| 267
|
py
|
Python
|
src/rpi/fwd.py
|
au-chrismor/selfdrive
|
31325dd7a173bbb16a13e3de4c9598aab0a50632
|
[
"BSD-3-Clause"
] | null | null | null |
src/rpi/fwd.py
|
au-chrismor/selfdrive
|
31325dd7a173bbb16a13e3de4c9598aab0a50632
|
[
"BSD-3-Clause"
] | 6
|
2018-03-15T05:23:55.000Z
|
2018-10-26T10:28:47.000Z
|
src/rpi/fwd.py
|
au-chrismor/selfdrive
|
31325dd7a173bbb16a13e3de4c9598aab0a50632
|
[
"BSD-3-Clause"
] | null | null | null |
"""Set-up and execute the main loop"""
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
#Right motor input A
GPIO.setup(18,GPIO.OUT)
#Right motor input B
GPIO.setup(23,GPIO.OUT)
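# Drive the right motor: input A high, input B low (forward, per the file name).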
GPIO.output(18,GPIO.HIGH)
GPIO.output(23,GPIO.LOW)
| 16.6875
| 38
| 0.749064
| 49
| 267
| 4.081633
| 0.571429
| 0.1
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033755
| 0.11236
| 267
| 15
| 39
| 17.8
| 0.810127
| 0.265918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92f3155f2bddf2f3a6336a7c75d33f3d299e7e91
| 931
|
py
|
Python
|
util/get_from_db.py
|
Abel-Huang/simple-image-classifier
|
89d2822c2b06cdec728f734d43d9638f4b601348
|
[
"MIT"
] | 4
|
2017-05-17T08:01:38.000Z
|
2018-07-22T11:13:55.000Z
|
util/get_from_db.py
|
Abel-Huang/ImageClassifier
|
89d2822c2b06cdec728f734d43d9638f4b601348
|
[
"MIT"
] | null | null | null |
util/get_from_db.py
|
Abel-Huang/ImageClassifier
|
89d2822c2b06cdec728f734d43d9638f4b601348
|
[
"MIT"
] | null | null | null |
import pymysql
# Connection configuration
config = {
'host': '127.0.0.1',
'port': 3306,
'user': 'root',
'password': '',
'db': 'classdata',
'charset': 'utf8',
'cursorclass': pymysql.cursors.DictCursor,
}
def get_summary_db(unitag):
    # Create the connection
conn = pymysql.connect(**config)
cur = conn.cursor()
    # Execute the SQL statement
try:
        # Execute the SQL statement to run the query
sql = 'SELECT * FROM summary where unitag= %s'
cur.execute(sql,unitag)
        # Fetch the query results
result = cur.fetchall()
return result
finally:
cur.close()
conn.close()
def get_result_db(unitag):
    # Create the connection
conn = pymysql.connect(**config)
cur = conn.cursor()
    # Execute the SQL statement
try:
        # Execute the SQL statement to run the query
sql = 'SELECT * FROM result where unitag= %s'
cur.execute(sql,unitag)
        # Fetch the query results
result = cur.fetchall()
return result
finally:
cur.close()
conn.close()
| 20.23913
| 54
| 0.541353
| 100
| 931
| 5
| 0.44
| 0.024
| 0.048
| 0.064
| 0.692
| 0.692
| 0.692
| 0.692
| 0.692
| 0.692
| 0
| 0.017433
| 0.322234
| 931
| 45
| 55
| 20.688889
| 0.77496
| 0.077336
| 0
| 0.5625
| 0
| 0
| 0.166078
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0.03125
| 0.03125
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92f638d897dda2bf328a3077b43f492f38f39bb7
| 1,412
|
py
|
Python
|
jduck/robot.py
|
luutp/jduck
|
3c60a79c926bb9452777cddbebe28982273068a6
|
[
"Apache-2.0"
] | null | null | null |
jduck/robot.py
|
luutp/jduck
|
3c60a79c926bb9452777cddbebe28982273068a6
|
[
"Apache-2.0"
] | null | null | null |
jduck/robot.py
|
luutp/jduck
|
3c60a79c926bb9452777cddbebe28982273068a6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
jduck.py
Description:
Author: luutp
Contact: [email protected]
Created on: 2021/02/27
"""
# Utilities
# %%
# ================================IMPORT PACKAGES====================================
# Utilities
from traitlets.config.configurable import SingletonConfigurable
# Custom Packages
from jduck.DCMotor import DCMotor
# ================================================================================
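# JDuck: a differential-drive robot wrapping two DCMotor instances (left on pins 32/36/38, right on 33/35/37).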
class JDuck(SingletonConfigurable):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)  # initialize the traitlets SingletonConfigurable machinery
self.left_motor = DCMotor(32, 36, 38, alpha=1.0)
self.right_motor = DCMotor(33, 35, 37, alpha=1.0)
self.left_motor.set_speed(50)
self.right_motor.set_speed(50)
def set_speeds(self, left_speed, right_speed):
self.left_motor.set_speed(left_speed)
self.right_motor.set_speed(right_speed)
def move_forward(self):
self.left_motor.rotate_forward()
self.right_motor.rotate_forward()
def move_backward(self):
self.left_motor.rotate_backward()
self.right_motor.rotate_backward()
def turn_left(self):
self.left_motor.rotate_backward()
self.right_motor.rotate_forward()
def turn_right(self):
self.left_motor.rotate_forward()
self.right_motor.rotate_backward()
def stop(self):
self.left_motor.stop()
self.right_motor.stop()
| 25.214286
| 85
| 0.61119
| 167
| 1,412
| 4.922156
| 0.353293
| 0.087591
| 0.126521
| 0.103406
| 0.40146
| 0.296837
| 0.245742
| 0.245742
| 0.245742
| 0.245742
| 0
| 0.028547
| 0.181303
| 1,412
| 55
| 86
| 25.672727
| 0.682526
| 0.23796
| 0
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.269231
| false
| 0
| 0.076923
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92f8d1944416ddff1cb95f31ed4c8d93f364c956
| 5,193
|
py
|
Python
|
src/nebulo/gql/alias.py
|
olirice/nebulo
|
de9b043fe66d0cb872c5c0f2aca3c5c6f20918a7
|
[
"MIT"
] | 76
|
2020-04-03T01:21:47.000Z
|
2021-12-06T02:54:53.000Z
|
src/nebulo/gql/alias.py
|
olirice/nebulo
|
de9b043fe66d0cb872c5c0f2aca3c5c6f20918a7
|
[
"MIT"
] | 7
|
2020-04-06T04:44:10.000Z
|
2021-05-17T12:38:15.000Z
|
src/nebulo/gql/alias.py
|
olirice/nebulo
|
de9b043fe66d0cb872c5c0f2aca3c5c6f20918a7
|
[
"MIT"
] | 2
|
2020-10-23T10:25:16.000Z
|
2020-10-28T14:16:57.000Z
|
# pylint: disable=missing-class-docstring,invalid-name
import typing
from graphql.language import (
InputObjectTypeDefinitionNode,
InputObjectTypeExtensionNode,
ObjectTypeDefinitionNode,
ObjectTypeExtensionNode,
)
from graphql.type import (
GraphQLArgument,
GraphQLBoolean,
GraphQLEnumType,
GraphQLEnumValue,
GraphQLField,
GraphQLFieldMap,
GraphQLFloat,
GraphQLID,
GraphQLInputFieldMap,
GraphQLInputObjectType,
GraphQLInt,
GraphQLInterfaceType,
GraphQLIsTypeOfFn,
GraphQLList,
GraphQLNonNull,
GraphQLObjectType,
GraphQLResolveInfo,
GraphQLScalarType,
GraphQLSchema,
GraphQLString,
GraphQLType,
Thunk,
)
from graphql.type.definition import GraphQLInputFieldOutType
from nebulo.sql.composite import CompositeType as SQLACompositeType
# Handle name changes from graphql-core and graphql-core-next
try:
from graphql.type import GraphQLInputObjectField as GraphQLInputField
except ImportError:
from graphql.type import GraphQLInputField
Type = GraphQLType
List = GraphQLList
NonNull = GraphQLNonNull
Argument = GraphQLArgument
Boolean = GraphQLBoolean
String = GraphQLString
ScalarType = GraphQLScalarType
ID = GraphQLID
InterfaceType = GraphQLInterfaceType
Int = GraphQLInt
InputField = GraphQLInputField
ResolveInfo = GraphQLResolveInfo
EnumType = GraphQLEnumType
EnumValue = GraphQLEnumValue
Schema = GraphQLSchema
Field = GraphQLField
Float = GraphQLFloat
class HasSQLAModel: # pylint: disable= too-few-public-methods
sqla_table = None
class HasSQLFunction: # pylint: disable= too-few-public-methods
sql_function = None
class HasSQLAComposite: # pylint: disable= too-few-public-methods
sqla_composite: SQLACompositeType
class ObjectType(GraphQLObjectType, HasSQLAModel):
def __init__(
self,
name: str,
fields: Thunk[GraphQLFieldMap],
interfaces: typing.Optional[Thunk[typing.Collection["GraphQLInterfaceType"]]] = None,
is_type_of: typing.Optional[GraphQLIsTypeOfFn] = None,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
description: typing.Optional[str] = None,
ast_node: typing.Optional[ObjectTypeDefinitionNode] = None,
extension_ast_nodes: typing.Optional[typing.Collection[ObjectTypeExtensionNode]] = None,
sqla_model=None,
) -> None:
super().__init__(
name=name,
fields=fields,
interfaces=interfaces,
is_type_of=is_type_of,
extensions=extensions,
description=description,
ast_node=ast_node,
extension_ast_nodes=extension_ast_nodes,
)
self.sqla_model = sqla_model
class ConnectionType(ObjectType):
pass
class EdgeType(ObjectType):
pass
class TableType(ObjectType):
pass
class CompositeType(ObjectType, HasSQLAComposite):
pass
class MutationPayloadType(ObjectType):
pass
class CreatePayloadType(MutationPayloadType):
pass
class UpdatePayloadType(MutationPayloadType):
pass
class DeletePayloadType(MutationPayloadType):
pass
class FunctionPayloadType(MutationPayloadType, HasSQLFunction):
pass
class InputObjectType(GraphQLInputObjectType, HasSQLAModel):
def __init__(
self,
name: str,
fields: Thunk[GraphQLInputFieldMap],
description: typing.Optional[str] = None,
out_type: typing.Optional[GraphQLInputFieldOutType] = None,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
ast_node: typing.Optional[InputObjectTypeDefinitionNode] = None,
extension_ast_nodes: typing.Optional[typing.Collection[InputObjectTypeExtensionNode]] = None,
sqla_model=None,
) -> None:
super().__init__(
name=name,
fields=fields,
description=description,
out_type=out_type,
extensions=extensions,
ast_node=ast_node,
extension_ast_nodes=extension_ast_nodes,
)
self.sqla_model = sqla_model
class CreateInputType(InputObjectType):
pass
class TableInputType(InputObjectType):
pass
class UpdateInputType(InputObjectType):
pass
class DeleteInputType(InputObjectType):
pass
class FunctionInputType(GraphQLInputObjectType):
def __init__(
self,
name: str,
fields: Thunk[GraphQLInputFieldMap],
description: typing.Optional[str] = None,
out_type: typing.Optional[GraphQLInputFieldOutType] = None,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
ast_node: typing.Optional[InputObjectTypeDefinitionNode] = None,
extension_ast_nodes: typing.Optional[typing.Collection[InputObjectTypeExtensionNode]] = None,
sql_function=None,
) -> None:
super().__init__(
name=name,
fields=fields,
description=description,
out_type=out_type,
extensions=extensions,
ast_node=ast_node,
extension_ast_nodes=extension_ast_nodes,
)
self.sql_function = sql_function
| 25.965
| 101
| 0.706913
| 457
| 5,193
| 7.868709
| 0.26477
| 0.062291
| 0.042547
| 0.017519
| 0.385984
| 0.371246
| 0.362347
| 0.342325
| 0.313404
| 0.313404
| 0
| 0
| 0.220104
| 5,193
| 199
| 102
| 26.095477
| 0.887901
| 0.044676
| 0
| 0.43871
| 0
| 0
| 0.004036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019355
| false
| 0.083871
| 0.051613
| 0
| 0.212903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
92fca9c0141bc42e92af9526839fedc292014b9b
| 292
|
py
|
Python
|
suda/1121/12.py
|
tusikalanse/acm-icpc
|
20150f42752b85e286d812e716bb32ae1fa3db70
|
[
"MIT"
] | 2
|
2021-06-09T12:27:07.000Z
|
2021-06-11T12:02:03.000Z
|
suda/1121/12.py
|
tusikalanse/acm-icpc
|
20150f42752b85e286d812e716bb32ae1fa3db70
|
[
"MIT"
] | 1
|
2021-09-08T12:00:05.000Z
|
2021-09-08T14:52:30.000Z
|
suda/1121/12.py
|
tusikalanse/acm-icpc
|
20150f42752b85e286d812e716bb32ae1fa3db70
|
[
"MIT"
] | null | null | null |
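# For each test case, print OK iff n = i*i + i + 41 (Euler's prime-generating polynomial) is prime for every i in [x, y].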
for _ in range(int(input())):
x, y = list(map(int, input().split()))
flag = 1
for i in range(x, y + 1):
n = i * i + i + 41
for j in range(2, n):
if j * j > n:
break
if n % j == 0:
flag = 0
break
if flag == 0:
break
if flag:
print("OK")
else:
print("Sorry")
| 17.176471
| 39
| 0.489726
| 55
| 292
| 2.581818
| 0.436364
| 0.147887
| 0.140845
| 0.169014
| 0.197183
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040609
| 0.325342
| 292
| 17
| 40
| 17.176471
| 0.680203
| 0
| 0
| 0.176471
| 0
| 0
| 0.023891
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13027944554eb9d8705c77a8073c891a250b3842
| 1,853
|
py
|
Python
|
ally/instrument.py
|
platformmaster9/PyAlly
|
55400e0835ae3ac5b3cf58e0e8214c6244aeb149
|
[
"MIT"
] | null | null | null |
ally/instrument.py
|
platformmaster9/PyAlly
|
55400e0835ae3ac5b3cf58e0e8214c6244aeb149
|
[
"MIT"
] | null | null | null |
ally/instrument.py
|
platformmaster9/PyAlly
|
55400e0835ae3ac5b3cf58e0e8214c6244aeb149
|
[
"MIT"
] | null | null | null |
from . import utils
#################################################
""" INSTRUMENT """
#################################################
def Instrument(symbol):
symbol = str(symbol).upper()
return {
'__symbol' : symbol,
'Sym' : symbol,
'SecTyp' : 'CS',
'__type' : 'equity'
}
#################################################
def Equity(symbol):
return Instrument(symbol)
#################################################
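# Option overlays FIXML-style maturity/strike fields (MatDt, StrkPx) on a base Instrument dict.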
def Option (instrument, maturity_date, strike):
return {
**{
'MatDt' : str(maturity_date) + 'T00:00:00.000-05:00',
'StrkPx' : str(int(strike)),
'SecTyp' : 'OPT',
'__maturity' : str(maturity_date),
'__strike' : str(int(strike))
},
**instrument
}
#################################################
def Call (instrument, maturity_date, strike):
# Let Option do some lifting
x = {
**{ 'CFI':'OC' },
**Option(instrument, maturity_date, strike)
}
x['__underlying'] = x['Sym']
x['__type'] = 'call'
x['__symbol'] = utils.option_format(
symbol = x['Sym'],
exp_date = x['__maturity'],
strike = x['__strike'],
direction = 'C'
)
return x
#################################################
def Put (instrument, maturity_date, strike):
# Let Option do some lifting
x = {
**{ 'CFI':'OP' },
**Option(instrument, maturity_date, strike)
}
x['__underlying'] = x['Sym']
x['__type'] = 'put'
x['__symbol'] = utils.option_format(
symbol = x['Sym'],
exp_date = x['__maturity'],
strike = x['__strike'],
direction = 'P'
)
return x
| 29.412698
| 70
| 0.399352
| 148
| 1,853
| 4.736486
| 0.283784
| 0.119829
| 0.154066
| 0.199715
| 0.562054
| 0.513552
| 0.513552
| 0.513552
| 0.513552
| 0.513552
| 0
| 0.010228
| 0.314085
| 1,853
| 63
| 71
| 29.412698
| 0.541306
| 0.028602
| 0
| 0.36
| 0
| 0
| 0.141781
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.02
| 0.04
| 0.22
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1302b2f57e10ec891cc57b121da1cf9b5593731f
| 432
|
py
|
Python
|
airbyte-integrations/connectors/source-yahoo-finance-price/integration_tests/acceptance.py
|
onaio/airbyte
|
38302e82a25f1b66742c3febfbff0668556920f2
|
[
"MIT"
] | 22
|
2020-08-27T00:47:20.000Z
|
2020-09-17T15:39:39.000Z
|
airbyte-integrations/connectors/source-yahoo-finance-price/integration_tests/acceptance.py
|
onaio/airbyte
|
38302e82a25f1b66742c3febfbff0668556920f2
|
[
"MIT"
] | 116
|
2020-08-27T01:11:27.000Z
|
2020-09-19T02:47:52.000Z
|
airbyte-integrations/connectors/source-yahoo-finance-price/integration_tests/acceptance.py
|
onaio/airbyte
|
38302e82a25f1b66742c3febfbff0668556920f2
|
[
"MIT"
] | 1
|
2020-09-15T06:10:01.000Z
|
2020-09-15T06:10:01.000Z
|
#
# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
#
import pytest
pytest_plugins = ("source_acceptance_test.plugin",)
@pytest.fixture(scope="session", autouse=True)
def connector_setup():
"""This fixture is a placeholder for external resources that acceptance test might require."""
# TODO: setup test dependencies if needed. otherwise remove the TODO comments
yield
# TODO: clean up test dependencies
| 25.411765
| 98
| 0.738426
| 55
| 432
| 5.727273
| 0.8
| 0.088889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011142
| 0.168981
| 432
| 16
| 99
| 27
| 0.866295
| 0.585648
| 0
| 0
| 0
| 0
| 0.213018
| 0.171598
| 0
| 0
| 0
| 0.0625
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
130613c0dd1daf7edf4aa6e30bb0303b2984e2ce
| 4,862
|
py
|
Python
|
hail/python/test/hailtop/utils/test_utils.py
|
vrautela/hail
|
7db6189b5b1feafa88452b8470e497d9505d9a46
|
[
"MIT"
] | null | null | null |
hail/python/test/hailtop/utils/test_utils.py
|
vrautela/hail
|
7db6189b5b1feafa88452b8470e497d9505d9a46
|
[
"MIT"
] | null | null | null |
hail/python/test/hailtop/utils/test_utils.py
|
vrautela/hail
|
7db6189b5b1feafa88452b8470e497d9505d9a46
|
[
"MIT"
] | null | null | null |
from hailtop.utils import (partition, url_basename, url_join, url_scheme,
url_and_params, parse_docker_image_reference)
def test_partition_zero_empty():
assert list(partition(0, [])) == []
def test_partition_even_small():
assert list(partition(3, range(3))) == [range(0, 1), range(1, 2), range(2, 3)]
def test_partition_even_big():
assert list(partition(3, range(9))) == [range(0, 3), range(3, 6), range(6, 9)]
def test_partition_uneven_big():
assert list(partition(2, range(9))) == [range(0, 5), range(5, 9)]
def test_partition_toofew():
assert list(partition(6, range(3))) == [range(0, 1), range(1, 2), range(2, 3),
range(3, 3), range(3, 3), range(3, 3)]
def test_url_basename():
assert url_basename('/path/to/file') == 'file'
assert url_basename('https://hail.is/path/to/file') == 'file'
def test_url_join():
assert url_join('/path/to', 'file') == '/path/to/file'
assert url_join('/path/to/', 'file') == '/path/to/file'
assert url_join('/path/to/', '/absolute/file') == '/absolute/file'
assert url_join('https://hail.is/path/to', 'file') == 'https://hail.is/path/to/file'
assert url_join('https://hail.is/path/to/', 'file') == 'https://hail.is/path/to/file'
assert url_join('https://hail.is/path/to/', '/absolute/file') == 'https://hail.is/absolute/file'
def test_url_scheme():
assert url_scheme('https://hail.is/path/to') == 'https'
assert url_scheme('/path/to') == ''
def test_url_and_params():
assert url_and_params('https://example.com/') == ('https://example.com/', {})
assert url_and_params('https://example.com/foo?') == ('https://example.com/foo', {})
assert url_and_params('https://example.com/foo?a=b&c=d') == ('https://example.com/foo', {'a': 'b', 'c': 'd'})
def test_parse_docker_image_reference():
x = parse_docker_image_reference('animage')
assert x.domain is None
assert x.path == 'animage'
assert x.tag is None
assert x.digest is None
assert x.name() == 'animage'
assert str(x) == 'animage'
x = parse_docker_image_reference('hailgenetics/animage')
assert x.domain == 'hailgenetics'
assert x.path == 'animage'
assert x.tag is None
assert x.digest is None
assert x.name() == 'hailgenetics/animage'
assert str(x) == 'hailgenetics/animage'
x = parse_docker_image_reference('localhost:5000/animage')
assert x.domain == 'localhost:5000'
assert x.path == 'animage'
assert x.tag is None
assert x.digest is None
assert x.name() == 'localhost:5000/animage'
assert str(x) == 'localhost:5000/animage'
x = parse_docker_image_reference('localhost:5000/a/b/name')
assert x.domain == 'localhost:5000'
assert x.path == 'a/b/name'
assert x.tag is None
assert x.digest is None
assert x.name() == 'localhost:5000/a/b/name'
assert str(x) == 'localhost:5000/a/b/name'
x = parse_docker_image_reference('localhost:5000/a/b/name:tag')
assert x.domain == 'localhost:5000'
assert x.path == 'a/b/name'
assert x.tag == 'tag'
assert x.digest is None
assert x.name() == 'localhost:5000/a/b/name'
assert str(x) == 'localhost:5000/a/b/name:tag'
x = parse_docker_image_reference('localhost:5000/a/b/name:tag@sha256:abc123')
assert x.domain == 'localhost:5000'
assert x.path == 'a/b/name'
assert x.tag == 'tag'
assert x.digest == 'sha256:abc123'
assert x.name() == 'localhost:5000/a/b/name'
assert str(x) == 'localhost:5000/a/b/name:tag@sha256:abc123'
x = parse_docker_image_reference('localhost:5000/a/b/name@sha256:abc123')
assert x.domain == 'localhost:5000'
assert x.path == 'a/b/name'
assert x.tag is None
assert x.digest == 'sha256:abc123'
assert x.name() == 'localhost:5000/a/b/name'
assert str(x) == 'localhost:5000/a/b/name@sha256:abc123'
x = parse_docker_image_reference('name@sha256:abc123')
assert x.domain is None
assert x.path == 'name'
assert x.tag is None
assert x.digest == 'sha256:abc123'
assert x.name() == 'name'
assert str(x) == 'name@sha256:abc123'
x = parse_docker_image_reference('gcr.io/hail-vdc/batch-worker:123fds312')
assert x.domain == 'gcr.io'
assert x.path == 'hail-vdc/batch-worker'
assert x.tag == '123fds312'
assert x.digest is None
assert x.name() == 'gcr.io/hail-vdc/batch-worker'
assert str(x) == 'gcr.io/hail-vdc/batch-worker:123fds312'
x = parse_docker_image_reference('us-docker.pkg.dev/my-project/my-repo/test-image')
assert x.domain == 'us-docker.pkg.dev'
assert x.path == 'my-project/my-repo/test-image'
assert x.tag is None
assert x.digest is None
assert x.name() == 'us-docker.pkg.dev/my-project/my-repo/test-image'
assert str(x) == 'us-docker.pkg.dev/my-project/my-repo/test-image'
| 37.689922
| 113
| 0.644385
| 741
| 4,862
| 4.118758
| 0.098516
| 0.114679
| 0.06291
| 0.068152
| 0.732962
| 0.678244
| 0.647772
| 0.609109
| 0.486894
| 0.486894
| 0
| 0.04725
| 0.177293
| 4,862
| 128
| 114
| 37.984375
| 0.71575
| 0
| 0
| 0.366337
| 0
| 0
| 0.317153
| 0.145825
| 0
| 0
| 0
| 0
| 0.772277
| 1
| 0.09901
| false
| 0
| 0.009901
| 0
| 0.108911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
131342de18ae50cff3d8d09f0b5c640ef367d9c5
| 997
|
py
|
Python
|
tests/test_dcd_api.py
|
sadamek/pyIMX
|
52af15e656b400f0812f16cf31d9bf6edbe631ad
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_dcd_api.py
|
sadamek/pyIMX
|
52af15e656b400f0812f16cf31d9bf6edbe631ad
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_dcd_api.py
|
sadamek/pyIMX
|
52af15e656b400f0812f16cf31d9bf6edbe631ad
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2017-2018 Martin Olejar
#
# SPDX-License-Identifier: BSD-3-Clause
# The BSD-3-Clause license for this file can be found in the LICENSE file included with this distribution
# or at https://spdx.org/licenses/BSD-3-Clause.html#licenseText
import os
import pytest
from imx import img
# Used Directories
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
# Test Files
DCD_TXT = os.path.join(DATA_DIR, 'dcd_test.txt')
DCD_BIN = os.path.join(DATA_DIR, 'dcd_test.bin')
def setup_module(module):
# Prepare test environment
pass
def teardown_module(module):
# Clean test environment
pass
def test_txt_parser():
with open(DCD_TXT, 'r') as f:
dcd_obj = img.SegDCD.parse_txt(f.read())
assert dcd_obj is not None
assert len(dcd_obj) == 12
def test_bin_parser():
with open(DCD_BIN, 'rb') as f:
dcd_obj = img.SegDCD.parse(f.read())
assert dcd_obj is not None
assert len(dcd_obj) == 12
| 22.155556
| 105
| 0.691073
| 162
| 997
| 4.092593
| 0.450617
| 0.054299
| 0.045249
| 0.042232
| 0.271493
| 0.271493
| 0.271493
| 0.129713
| 0.129713
| 0.129713
| 0
| 0.01875
| 0.197593
| 997
| 44
| 106
| 22.659091
| 0.81
| 0.316951
| 0
| 0.3
| 0
| 0
| 0.046269
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0.1
| 0.15
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
1324d01927785e4ef25103f8dd91f9cf2502dddb
| 722
|
py
|
Python
|
import.py
|
vmariano/meme-classifier
|
e8d6e73e4a843542143f20381c0741df16d3945d
|
[
"BSD-3-Clause"
] | null | null | null |
import.py
|
vmariano/meme-classifier
|
e8d6e73e4a843542143f20381c0741df16d3945d
|
[
"BSD-3-Clause"
] | 1
|
2022-02-07T12:06:59.000Z
|
2022-02-07T12:06:59.000Z
|
import.py
|
vmariano/meme-classifier
|
e8d6e73e4a843542143f20381c0741df16d3945d
|
[
"BSD-3-Clause"
] | 1
|
2022-02-07T02:38:04.000Z
|
2022-02-07T02:38:04.000Z
|
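# Load a chat export (result.json, Telegram-style), run each photo through the meme classifier, and store rows in Postgres.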
from dotenv import load_dotenv
load_dotenv()
import sys
import os
import re
import json
import psycopg2
from meme_classifier.images import process_image
path = sys.argv[1]
data = json.load(open(os.path.join(path, 'result.json'), 'r'))
chat_id = data['id']
conn = psycopg2.connect(os.getenv('POSTGRES_CREDENTIALS'))
for m in data['messages']:
if 'photo' in m:
template, text = process_image(open(os.path.join(path, m['photo']), 'rb'))
message_id = m['id']
print(f'processing message {message_id}')
cur = conn.cursor()
cur.execute("INSERT INTO meme (template, text, chat_id, message_id) VALUES (%s, %s, %s, %s)", (template, text, chat_id, message_id))
conn.commit()
| 26.740741
| 140
| 0.67036
| 108
| 722
| 4.361111
| 0.462963
| 0.076433
| 0.042463
| 0.059448
| 0.191083
| 0.11465
| 0
| 0
| 0
| 0
| 0
| 0.005076
| 0.18144
| 722
| 26
| 141
| 27.769231
| 0.791878
| 0
| 0
| 0
| 0
| 0.05
| 0.228532
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.35
| 0
| 0.35
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
132bada4fd58d52bb6e1891b67b8d0c493944002
| 6,243
|
py
|
Python
|
funcoes.py
|
ZezaoDev/Circtrigo
|
5e5f6be0bdee17d30c2993478ca25409b82b6af3
|
[
"MIT"
] | null | null | null |
funcoes.py
|
ZezaoDev/Circtrigo
|
5e5f6be0bdee17d30c2993478ca25409b82b6af3
|
[
"MIT"
] | null | null | null |
funcoes.py
|
ZezaoDev/Circtrigo
|
5e5f6be0bdee17d30c2993478ca25409b82b6af3
|
[
"MIT"
] | null | null | null |
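# Trigonometric-circle drawing helpers built on turtle graphics: circle, axes, angle, sine, cosine, tangent.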
import turtle as t
import math
class circTrigo:
def __init__(self):
self.raio = 0
self.grau = 0
self.seno = 0
self.cosseno = 0
self.tangente = 0
self.quadrante = 0
self.tema = ''
t.bgcolor("black")
t.pencolor("white")
def seta(self):
        # DRAW AN ARROW
t.left(90)
t.forward(5)
t.right(120)
t.forward(10)
t.right(120)
t.forward(10)
t.right(120)
t.forward(5)
t.right(90)
def linha(self, pxls):
        # DRAW A DOTTED LINE
pixels = int(pxls//1)
if pixels % 2 == 0:
pixels = pixels + 1
for x in range(0, pixels//10):
t.pendown()
t.forward(5)
t.penup()
t.forward(5)
t.pendown()
t.forward(pixels%10)
def reset(self):
        # RETURN TO THE STARTING POSITION
t.penup()
t.home()
t.pendown()
t.speed(0)
t.pensize(2)
t.pencolor("white")
def circulo(self, raio):
        # DRAW THE CIRCLE
self.raio = raio
t.right(90)
t.penup()
t.forward(self.raio)
t.left(90)
t.pendown()
t.circle(self.raio)
self.reset()
def eixos(self):
        # X AXIS
t.penup()
t.backward(self.raio + 50)
t.pendown()
self.linha((self.raio*2)+100)
self.seta()
self.reset()
        # Y AXIS
t.left(90)
t.penup()
t.backward(self.raio + 50)
t.pendown()
self.linha((self.raio*2)+100)
self.seta()
self.reset()
def angulo(self, grau):
        # DRAW THE ANGLE
self.grau = grau % 360
t.left(self.grau)
t.forward(self.raio)
self.reset()
        # SET THE VALUES OF SINE, COSINE AND TANGENT.
self.seno = math.sin(math.radians(self.grau))
self.cosseno = math.cos(math.radians(self.grau))
self.tangente = math.tan(math.radians(self.grau))
        # DETERMINE THE ANGLE'S QUADRANT
vquad = self.grau
if 0 < vquad < 90:
self.quadrante = 1
elif 90 < vquad < 180:
self.quadrante = 2
elif 180 < vquad < 270:
self.quadrante = 3
elif 270 < vquad < 360:
self.quadrante = 4
        if vquad == 0 or vquad == 90 or vquad == 180 or vquad == 270 or vquad == 360:  # Quadrant 0 marks angles whose results are undefined
self.quadrante = 0
def sen(self):
        # DRAW THE SINE
t.left(self.grau)
t.forward(self.raio)
t.pencolor("red")
if self.quadrante == 1:
t.left(180 - self.grau)
self.linha(self.cosseno * self.raio)
t.left(90)
t.forward(self.seno * self.raio)
print (self.seno)
elif self.quadrante == 2:
t.right(self.grau)
self.linha((self.cosseno * self.raio) * -1)
t.right(90)
t.forward(self.seno * self.raio)
print (self.seno)
elif self.quadrante == 3:
t.right(self.grau)
self.linha(self.cosseno * self.raio * -1)
t.left(90)
t.forward(self.seno * self.raio * -1)
print (self.seno)
elif self.quadrante == 4:
t.left(180 - self.grau)
self.linha(self.cosseno * self.raio)
t.left(90)
t.forward(self.seno * self.raio)
print (self.seno)
else:
print("Erro: angulo invalido")
self.reset()
def csen(self):
        # DRAW THE COSINE
t.left(self.grau)
t.forward(self.raio)
t.pencolor("green")
if self.quadrante == 1:
t.right(self.grau + 90)
self.linha(self.seno * self.raio)
t.right(90)
t.forward(self.cosseno * self.raio)
print (self.cosseno)
elif self.quadrante == 2:
t.right(self.grau + 90)
self.linha(self.seno * self.raio)
t.right(90)
t.forward(self.cosseno * self.raio)
print (self.cosseno)
elif self.quadrante == 3:
t.right(self.grau - 90)
self.linha(self.seno * self.raio * -1)
t.right(90)
t.forward(self.cosseno * self.raio * -1)
print (self.cosseno)
elif self.quadrante == 4:
t.right(self.grau - 90)
self.linha(self.seno * self.raio * -1)
t.left(90)
t.forward(self.cosseno * self.raio)
print (self.cosseno)
else:
print("Erro: angulo invalido")
self.reset()
def tan(self):
        # DRAW THE TANGENT
t.left(self.grau)
t.penup()
t.pencolor("blue")
if self.quadrante == 1:
t.forward(self.raio)
t.pendown()
self.linha(math.sqrt(((self.tangente*self.raio)**2) + (self.raio**2)) - self.raio)
t.right(self.grau + 90)
t.forward(self.tangente * self.raio)
print (self.tangente)
elif self.quadrante == 2:
t.left(180)
t.forward(self.raio)
t.pendown()
self.linha(math.sqrt(((self.tangente*self.raio)**2) + (self.raio**2)) - self.raio)
t.left(90 - self.grau)
t.forward(self.tangente * self.raio)
print (self.tangente)
elif self.quadrante == 3:
t.left(180)
t.forward(self.raio)
t.pendown()
self.linha(math.sqrt(((self.tangente*self.raio)**2) + (self.raio**2)) - self.raio)
t.right(self.grau - 90)
t.forward(self.tangente * self.raio)
print (self.tangente)
elif self.quadrante == 4:
t.forward(self.raio)
t.pendown()
self.linha(math.sqrt(((self.tangente*self.raio)**2) + (self.raio**2)) - self.raio)
t.right(90 + self.grau)
t.forward(self.tangente * self.raio)
print (self.tangente)
else:
print("Erro: angulo invalido")
self.reset()
| 30.014423
| 149
| 0.492712
| 775
| 6,243
| 3.963871
| 0.125161
| 0.125
| 0.078125
| 0.045573
| 0.665039
| 0.611979
| 0.586263
| 0.574544
| 0.538411
| 0.514648
| 0
| 0.043052
| 0.378664
| 6,243
| 207
| 150
| 30.15942
| 0.748904
| 0.049175
| 0
| 0.708791
| 0
| 0
| 0.015198
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054945
| false
| 0
| 0.010989
| 0
| 0.071429
| 0.082418
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1335675a9f3e2654ba5bacc0a704284147b3d912
| 2,518
|
py
|
Python
|
tests/test_get_set.py
|
snoopyjc/ssf
|
b995cae0e90d38e3758d4944fb144831f9bae0a5
|
[
"Apache-2.0"
] | 3
|
2020-10-07T18:28:12.000Z
|
2020-10-09T15:24:53.000Z
|
tests/test_get_set.py
|
snoopyjc/ssf
|
b995cae0e90d38e3758d4944fb144831f9bae0a5
|
[
"Apache-2.0"
] | 15
|
2020-10-09T15:23:03.000Z
|
2020-10-29T04:34:17.000Z
|
tests/test_get_set.py
|
snoopyjc/ssf
|
b995cae0e90d38e3758d4944fb144831f9bae0a5
|
[
"Apache-2.0"
] | null | null | null |
from ssf import SSF
ssf = SSF(errors='raise')
def test_get_set_days():
dn = ssf.get_day_names()
assert isinstance(dn, tuple)
assert dn == (('Mon', 'Monday'),
('Tue', 'Tuesday'),
('Wed', 'Wednesday'),
('Thu', 'Thursday'),
('Fri', 'Friday'),
('Sat', 'Saturday'),
('Sun', 'Sunday'))
ssf.set_day_names([['MO', 'MON'],
('TU', 'TUE'), ['WE', 'WED'],
('TH', 'THU'), ['FR', 'FRI'],
('SA', 'SAT'), ['SU', 'SUN']])
assert ssf.format('ddd dddd', '10/3/2020') == 'SA SAT'
assert ssf.format('ddd dddd', '10/4/2020') == 'SU SUN'
assert ssf.format('ddd dddd', '10/5/2020') == 'MO MON'
assert ssf.format('ddd dddd', '10/6/2020') == 'TU TUE'
assert ssf.format('ddd dddd', '10/7/2020') == 'WE WED'
assert ssf.format('ddd dddd', '10/8/2020') == 'TH THU'
assert ssf.format('ddd dddd', '10/9/2020') == 'FR FRI'
try:
ssf.set_day_names(2)
assert False # Failed
except ValueError:
pass
try:
ssf.set_day_names((1, 2, 3, 4, 5, 6, 7))
assert False # Failed
except ValueError:
pass
def test_get_set_months():
mn = ssf.get_month_names()
assert isinstance(mn, tuple)
assert mn == (None, ('J', 'Jan', 'January'), ('F', 'Feb', 'February'), ('M', 'Mar', 'March'),
('A', 'Apr', 'April'), ('M', 'May', 'May'), ('J', 'Jun', 'June'), ('J', 'Jul', 'July'),
('A', 'Aug', 'August'), ('S', 'Sep', 'September'), ('O', 'Oct', 'October'),
('N', 'Nov', 'November'), ('D', 'Dec', 'December'))
ssf.set_month_names(mn[:-1] + (('X', 'DE', 'DEC'),) )
assert ssf.format('mmmmm mmm mmmm', '12/3/2020') == 'X DE DEC'
try:
ssf.set_month_names(2)
assert False # Failed
except ValueError:
pass
try:
ssf.set_month_names((0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
assert False # Failed
except ValueError:
pass
def test_get_load_table():
t = ssf.get_table()
assert t[0] == 'General'
assert t[1] == '0'
assert t[14] == 'm/d/yyyy'
assert t[49] == '@'
ssf.load_table({104:'yyyy-mm-dd', 105:'0.0'})
assert ssf.format(104, '10/6/2020') == '2020-10-06'
assert ssf.format(105, 3.4) == '3.4'
assert ssf.load('0') == 1
assert ssf.load('mmm mmmm') == 5 # Will be inserted at 5
assert ssf.load('@') == 49
assert ssf.format(5, '10/6/2020') == 'Oct October'
| 31.475
| 100
| 0.496029
| 349
| 2,518
| 3.501433
| 0.332378
| 0.10311
| 0.135025
| 0.10311
| 0.355155
| 0.319149
| 0.220949
| 0.209493
| 0.162029
| 0.085106
| 0
| 0.074849
| 0.278396
| 2,518
| 79
| 101
| 31.873418
| 0.597689
| 0.01946
| 0
| 0.246154
| 0
| 0
| 0.208283
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.046154
| false
| 0.061538
| 0.015385
| 0
| 0.061538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
133cca4be64ff28929df70bc44eae2ffd26907ef
| 5,889
|
py
|
Python
|
tests/test_list.py
|
amikrop/django-paste
|
109f6e5a42bdc20f3cb671471b3ce5c9e329148b
|
[
"MIT"
] | 3
|
2020-11-11T11:28:47.000Z
|
2022-03-16T11:27:39.000Z
|
tests/test_list.py
|
amikrop/django-paste
|
109f6e5a42bdc20f3cb671471b3ce5c9e329148b
|
[
"MIT"
] | null | null | null |
tests/test_list.py
|
amikrop/django-paste
|
109f6e5a42bdc20f3cb671471b3ce5c9e329148b
|
[
"MIT"
] | 1
|
2021-01-05T15:01:06.000Z
|
2021-01-05T15:01:06.000Z
|
import json

from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase

from paste import constants
from tests.mixins import SnippetListTestCaseMixin
from tests.utils import constant, create_snippet, create_user


class SnippetListTestCase(SnippetListTestCaseMixin, APITestCase):
    """Tests for the snippet list view."""

    def url(self):
        """Return the snippet list URL."""
        return reverse('snippet-list')

    def post(self, **kwargs):
        """Send a POST request to the view's URL with data indicated by given
        kwargs, as JSON, using the proper content-type, and return the
        response.
        """
        return self.client.post(
            self.url(), data=json.dumps(kwargs),
            content_type='application/json')

    def test_get_success(self):
        """Snippet list GET must return all the viewable snippets."""
        create_snippet('foo')
        create_snippet('bar')
        response = self.get()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 2)
        self.assertEqual(response.data[0]['content'], 'foo')
        self.assertEqual(response.data[1]['content'], 'bar')

    def test_get_private(self):
        """Snippet list GET must return private snippets only to those
        authorized to view them.
        """
        owner = create_user('owner')
        create_snippet('foo', private=True, owner=owner)
        expected = [0, 0, 1, 1]

        def check(i):
            response = self.get()
            self.assertEqual(len(response.data), expected[i])

        self.check_for_users(check, owner)

    def test_get_list_foreign(self):
        """Snippet list GET must not return snippets owned by other users if
        the LIST_FOREIGN setting is False, unless requested by a staff user.
        """
        create_snippet('foo')
        create_snippet('bar', owner=self.user)
        expected = [0, 1, 2]

        def check(i):
            response = self.get()
            self.assertEqual(len(response.data), expected[i])

        with constant('LIST_FOREIGN', False):
            self.check_for_users(check)

    def test_post_success(self):
        """Snippet list POST must create a new snippet."""
        response = self.post(
            content='foo', style='friendly', embed_title=False)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['content'], 'foo')
        self.assertEqual(response.data['title'], '')
        self.assertEqual(response.data['language'], '')
        self.assertEqual(response.data['style'], 'friendly')
        self.assertEqual(
            response.data['line_numbers'], constants.DEFAULT_LINE_NUMBERS)
        self.assertFalse(response.data['embed_title'])
        self.assertEqual(response.data['private'], constants.DEFAULT_PRIVATE)
        self.assertIsNone(response.data['owner'])

    def test_post_owner(self):
        """Snippet list POST must store the currently authenticated user as
        the newly created snippet's owner.
        """
        self.client.force_authenticate(self.user)
        response = self.post(content='foo')
        self.assertEqual(response.data['owner'], self.user.pk)

    def test_post_no_content(self):
        """Snippet list POST must return a 400 Bad Request response if no
        content field is set.
        """
        response = self.post(title='foo')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_post_oversized_title(self):
        """Snippet list POST must return a 400 Bad Request response if the
        title field consists of more characters than the TITLE_MAX_LENGTH
        setting indicates.
        """
        title = 'a' * (constants.TITLE_MAX_LENGTH + 1)
        response = self.post(content='foo', title=title)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_post_invalid(self):
        """Snippet list POST must return a 400 Bad Request response if a value
        different from the available choices is set for a multiple choice
        field.
        """
        for field in ['language', 'style']:
            response = self.post(
                **{'content': 'foo', field: '123-invalid-abc'})
            self.assertEqual(
                response.status_code, status.HTTP_400_BAD_REQUEST)

    def check_post_forbid_anonymous(self, setting):
        """Check that snippet list POST returns a 403 Forbidden response to
        anonymous users if the given setting is True.
        """
        expected = (
            [status.HTTP_403_FORBIDDEN] + [status.HTTP_400_BAD_REQUEST] * 2)

        def check(i):
            response = self.post()
            self.assertEqual(response.status_code, expected[i])

        with constant(setting):
            self.check_for_users(check)

    def test_post_forbid_anonymous(self):
        """Snippet list POST must return a 403 Forbidden response to anonymous
        users if the FORBID_ANONYMOUS setting is True.
        """
        self.check_post_forbid_anonymous('FORBID_ANONYMOUS')

    def test_post_forbid_anonymous_create(self):
        """Snippet list POST must return a 403 Forbidden response to anonymous
        users if the FORBID_ANONYMOUS_CREATE setting is True.
        """
        self.check_post_forbid_anonymous('FORBID_ANONYMOUS_CREATE')

    def test_post_anonymous_private(self):
        """Snippet list POST must return a 400 Bad Request response to
        anonymous users who attempt to create a private snippet.
        """
        response = self.post(content='foo', private=True)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_pagination(self):
        """Snippet list must be able to handle pagination."""
        self.check_pagination()
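The tests above lean on a `constant` helper to flip a paste setting for the duration of a block. Its implementation lives in tests/utils.py, which is not part of this record; a plausible sketch, assuming the settings are module-level attributes on `paste.constants` (the default of True matches the single-argument calls above):

from contextlib import contextmanager
from unittest import mock

from paste import constants

@contextmanager
def constant(name, value=True):
    # Temporarily override the setting `name` for the enclosed block.
    with mock.patch.object(constants, name, value):
        yield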
1368b793b823b3bd0b461ed385d6e6b6434e1e68 | 3,455 | py | Python | scripts/dev/dockerutil.py | axelbarjon/mongodb-kubernetes-operator | 13eb844c55774ce8a6de51edde1a66b4371f3ef6 | ["RSA-MD"] | 1 | 2021-03-24T17:54:51.000Z | 2021-03-24T17:54:51.000Z | scripts/dev/dockerutil.py | axelbarjon/mongodb-kubernetes-operator | 13eb844c55774ce8a6de51edde1a66b4371f3ef6 | ["RSA-MD"] | 18 | 2021-03-08T13:38:37.000Z | 2022-02-14T15:06:28.000Z | scripts/dev/dockerutil.py | axelbarjon/mongodb-kubernetes-operator | 13eb844c55774ce8a6de51edde1a66b4371f3ef6 | ["RSA-MD"] | 1 | 2021-03-25T13:37:02.000Z | 2021-03-25T13:37:02.000Z |
import docker
from dockerfile_generator import render
import os
import json
from tqdm import tqdm
from typing import Any, Optional


def build_image(repo_url: str, tag: str, path: str) -> None:
    """
    build_image builds the image with the given tag
    """
    client = docker.from_env()
    print(f"Building image: {tag}")
    client.images.build(tag=tag, path=path)
    print("Successfully built image!")


def push_image(tag: str) -> None:
    """
    push_image pushes the given tag. It uses
    the current docker environment
    """
    client = docker.from_env()
    print(f"Pushing image: {tag}")
    with tqdm(total=100, ascii=False) as progress_bar:
        last_percent = 0.0
        for line in client.images.push(tag, stream=True):
            percent = get_completion_percentage(line)
            if percent:
                progress_bar.update(percent - last_percent)
                last_percent = percent


def retag_image(
    old_repo_url: str,
    new_repo_url: str,
    old_tag: str,
    new_tag: str,
    path: str,
    labels: Optional[dict] = None,
    username: Optional[str] = None,
    password: Optional[str] = None,
    registry: Optional[str] = None,
) -> None:
    with open(f"{path}/Dockerfile", "w") as f:
        f.write(f"FROM {old_repo_url}:{old_tag}")
    client = docker.from_env()
    if all(value is not None for value in [username, password, registry]):
        client.login(username=username, password=password, registry=registry)
    image, _ = client.images.build(path=f"{path}", labels=labels, tag=new_tag)
    image.tag(new_repo_url, new_tag)
    os.remove(f"{path}/Dockerfile")

    # We do not want to republish an image that has not changed, so we check if the new
    # pair repo:tag already exists.
    try:
        image = client.images.pull(new_repo_url, new_tag)
        return
    # We also need to catch APIError as if the image has been recently deleted (uncommon, but might happen?)
    # we will get this kind of error:
    # docker.errors.APIError: 500 Server Error: Internal Server Error
    # ("unknown: Tag <tag> was deleted or has expired. To pull, revive via time machine")
    except (docker.errors.ImageNotFound, docker.errors.APIError):
        pass

    print(f"Pushing to {new_repo_url}:{new_tag}")
    client.images.push(new_repo_url, new_tag)


def get_completion_percentage(line: Any) -> float:
    try:
        line = json.loads(line.strip().decode("utf-8"))
    except ValueError:
        return 0.0
    to_skip = ("Preparing", "Waiting", "Layer already exists")
    if "status" in line:
        if line["status"] in to_skip:
            return 0.0
        if line["status"] == "Pushing":
            try:
                current = float(line["progressDetail"]["current"])
                total = float(line["progressDetail"]["total"])
            except KeyError:
                return 0.0
            result = (current / total) * 100
            if result > 100.0:
                return 100.0
            return result
    return 0.0


def build_and_push_image(repo_url: str, tag: str, path: str, image_type: str) -> None:
    """
    build_and_push_image creates the Dockerfile for the given image type
    and pushes the built image to the target repo
    """
    dockerfile_text = render(image_type, ["."])
    with open(f"{path}/Dockerfile", "w") as f:
        f.write(dockerfile_text)
    build_image(repo_url, tag, path)
    os.remove(f"{path}/Dockerfile")
    push_image(tag)
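A hypothetical driver for these helpers; the registry URL, tag and image type below are made-up values, and `render` must know the given image type:

if __name__ == "__main__":
    repo = "quay.io/example/operator"  # hypothetical repository
    build_and_push_image(
        repo_url=repo,
        tag=f"{repo}:0.1.0",
        path=".",
        image_type="operator",  # hypothetical dockerfile_generator image type
    )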
136a5b3d51a58e910193de0d1a2f38a488d4801a | 1,435 | py | Python | twitoff/twitter.py | ChristopherKchilton/twitoff-ChristopherKchilton | fbac9899feff256ededab009b28e2f6ebd67f476 | ["MIT"] | 1 | 2021-09-23T22:04:09.000Z | 2021-09-23T22:04:09.000Z | twitoff/twitter.py | ChristopherKchilton/twitoff-ChristopherKchilton | fbac9899feff256ededab009b28e2f6ebd67f476 | ["MIT"] | null | null | null | twitoff/twitter.py | ChristopherKchilton/twitoff-ChristopherKchilton | fbac9899feff256ededab009b28e2f6ebd67f476 | ["MIT"] | null | null | null |
"""Retrieve and request tweets from the DS API"""
import requests
import spacy
from .models import DB, Tweet, User
nlp = spacy.load("my_model")
def vectorize_tweet(tweet_text):
return nlp(tweet_text).vector
# Add and updates tweets
def add_or_update_user(username):
"""Adds and updates the user with twiter handle 'username'
to our database
"""
#TODO: Figure out
try:
r = requests.get(
f"https://lambda-ds-twit-assist.herokuapp.com/user/{username}")
user = r.json()
user_id = user["twitter_handle"]["id"]
# print(user)
# This is either respectively grabs or creates a user for our db
db_user = (User.query.get(user_id)) or User(id=user_id, name=username)
# This adds the db_user to our database
DB.session.add(db_user)
tweets = user["tweets"]
# if tweets:
# db_user.newest_tweet_id = tweets[0].id
for tweet in tweets:
tweet_vector = vectorize_tweet(tweet["full_text"])
tweet_id = tweet["id"]
db_tweet = (Tweet.query.get(tweet_id)) or Tweet(
id=tweet["id"], text=tweet["full_text"], vect=tweet_vector)
db_user.tweets.append(db_tweet)
DB.session.add(db_tweet)
except Exception as e:
print("Error processing {}: {}".format(username, e))
raise e
else:
DB.session.commit()
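A sketch of how this module is typically driven; the app factory name is hypothetical, and `DB.session` requires an active Flask application context:

# >>> from twitoff.app import create_app   # hypothetical module and factory
# >>> app = create_app()
# >>> with app.app_context():
# ...     add_or_update_user('nasa')       # pulls tweets via the DS API and commits them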
136bbda00809274a9f8b16997fd9b06b349771f8 | 3,754 | py | Python | vivo2notld/definitions/person_definition.py | gwu-libraries/vivo2notld | 3f579f8aad28c60119864757e1fe66c2d64a0149 | ["MIT"] | 5 | 2015-09-23T10:05:29.000Z | 2016-04-07T17:08:38.000Z | vivo2notld/definitions/person_definition.py | gwu-libraries/vivo2notld | 3f579f8aad28c60119864757e1fe66c2d64a0149 | ["MIT"] | null | null | null | vivo2notld/definitions/person_definition.py | gwu-libraries/vivo2notld | 3f579f8aad28c60119864757e1fe66c2d64a0149 | ["MIT"] | null | null | null |
from .document_summary import definition as document_summary_definition
from .organization_summary import definition as organization_summary_definition

definition = {
    "where": "?subj a foaf:Person .",
    "fields": {
        "name": {
            "where": "?subj rdfs:label ?obj ."
        },
        # Contact info
        "email": {
            "where": """
                ?subj obo:ARG_2000028 ?vc .
                ?vc a vcard:Kind .
                ?vc vcard:hasEmail ?vce .
                ?vce a vcard:Email, vcard:Work .
                ?vce vcard:email ?obj .
            """
        },
        "telephone": {
            "where": """
                ?subj obo:ARG_2000028 ?vc .
                ?vc a vcard:Kind .
                ?vc vcard:hasTelephone ?vct .
                ?vct a vcard:Telephone .
                ?vct vcard:telephone ?obj .
            """
        },
        "address": {
            "where": """
                ?subj obo:ARG_2000028 ?vc .
                ?vc a vcard:Kind .
                ?vc vcard:hasAddress ?obj .
            """,
            "definition": {
                "where": "?subj a vcard:Address .",
                "fields": {
                    "address": {
                        "where": "?subj vcard:streetAddress ?obj ."
                    },
                    "city": {
                        "where": "?subj vcard:locality ?obj ."
                    },
                    "state": {
                        "where": "?subj vcard:region ?obj ."
                    },
                    "zip": {
                        "where": "?subj vcard:postalCode ?obj ."
                    }
                }
            }
        },
        "website": {
            "list": True,
            "where": """
                ?subj obo:ARG_2000028 ?vc .
                ?vc a vcard:Kind .
                ?vc vcard:hasURL ?vcu .
                ?vcu a vcard:URL .
                ?vcu vcard:url ?obj .
            """,
            "optional": True
        },
        "researchArea": {
            "where": """
                ?subj vivo:hasResearchArea ?ra .
                ?ra rdfs:label ?obj .
            """,
            "optional": True,
            "list": True
        },
        "geographicFocus": {
            "where": """
                ?subj vivo:geographicFocus ?gf .
                ?gf rdfs:label ?obj .
            """,
            "optional": True,
            "list": True
        },
        "overview": {
            "where": "?subj vivo:overview ?obj .",
            "optional": True,
        },
        "positions": {
            "where": "?subj vivo:relatedBy ?obj .",
            "definition": {
                "where": "?subj a vivo:Position .",
                "fields": {
                    "title": {
                        "where": "?subj rdfs:label ?obj ."
                    },
                    "organization": {
                        "where": "?subj vivo:relates ?obj .",
                        "definition": organization_summary_definition
                    }
                }
            },
            "optional": True,
            "list": True
        },
        "publications": {
            "where": """
                ?subj vivo:relatedBy ?aship .
                ?aship a vivo:Authorship .
                ?aship vivo:relates ?obj .
            """,
            "definition": document_summary_definition,
            "optional": True,
            "list": True
        }
    }
}
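Each `where` clause above is a SPARQL graph pattern with `?subj` bound to the current subject and `?obj` to the value being extracted. A naive illustration of how one field's pattern could be assembled into a query; this is a sketch only, not vivo2notld's actual query builder:

def sketch_query(defn, field):
    # Combine the subject filter with one field's graph pattern.
    return ("SELECT ?obj WHERE { " + defn["where"] + " "
            + defn["fields"][field]["where"] + " }")

# sketch_query(definition, "name")
# -> SELECT ?obj WHERE { ?subj a foaf:Person . ?subj rdfs:label ?obj . }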
1379138cdd6c153ab5075c9fd6e443c52181da72 | 4,618 | py | Python | BridgeOptimizer/scriptBuilder/ScriptBuilderBoundaryConditions.py | manuel1618/bridgeOptimizer | 273bbf27b2c6273e4aaca55debbd9a10bebf7042 | ["MIT"] | 1 | 2022-01-20T16:30:04.000Z | 2022-01-20T16:30:04.000Z | BridgeOptimizer/scriptBuilder/ScriptBuilderBoundaryConditions.py | manuel1618/bridgeOptimizer | 273bbf27b2c6273e4aaca55debbd9a10bebf7042 | ["MIT"] | 13 | 2022-01-07T14:07:15.000Z | 2022-01-29T19:42:48.000Z | BridgeOptimizer/scriptBuilder/ScriptBuilderBoundaryConditions.py | manuel1618/bridgeOptimizer | 273bbf27b2c6273e4aaca55debbd9a10bebf7042 | ["MIT"] | null | null | null |
from typing import List

from BridgeOptimizer.datastructure.hypermesh.LoadCollector import LoadCollector
from BridgeOptimizer.datastructure.hypermesh.LoadStep import LoadStep
from BridgeOptimizer.datastructure.hypermesh.Force import Force
from BridgeOptimizer.datastructure.hypermesh.SPC import SPC


class ScriptBuilderBoundaryConditions:
    """
    Extra class for generating load steps, load collectors, forces and constraints

    Parameters:
    ---------
    None
    """

    def __init__(self) -> None:
        pass

    def write_tcl_commands_loadCollectors(self, tcl_commands: List) -> None:
        """
        Creates all the load collectors (has to be done before creating the
        load steps, as the load collectors are referenced there)
        """
        load_collector: LoadCollector = None
        # create all load collectors and loads first
        for load_collector in LoadCollector.instances:
            load_collector_type = load_collector.get_load_collector_type()
            load_collector.name = f"{str(load_collector_type.__name__)}_{str(load_collector.get_id())}"
            tcl_commands.append(
                f"*createentity loadcols includeid=0 name=\"{load_collector.name}\"")
            # create loads
            for load in load_collector.loads:
                if load_collector_type == Force:
                    force: Force = load
                    tcl_commands.append(
                        f"*createmark nodes 1 {' '.join([str(x) for x in force.nodeIds])}")
                    tcl_commands.append(
                        f"*loadcreateonentity_curve nodes 1 1 1 {force.x} {force.y} {force.z} 0 {force.x} {force.y} {force.z} 0 0 0 0")
                elif load_collector_type == SPC:
                    spc: SPC = load
                    tcl_commands.append(
                        f"*createmark nodes 1 {' '.join([str(x) for x in spc.nodeIds])}")
                    tcl_commands.append(
                        f"*loadcreateonentity_curve nodes 1 3 1 {spc.dofs[0]} {spc.dofs[1]} {spc.dofs[2]} {spc.dofs[3]} {spc.dofs[4]} {spc.dofs[5]} 0 0 0 0 0")
        tcl_commands.append("*createmark loads 0 1")
        tcl_commands.append("*loadsupdatefixedvalue 0 0")

    def write_tcl_commands_loadsteps(self, tcl_commands: List) -> None:
        """
        Single method to write all tcl commands to the file
        """
        self.write_tcl_commands_loadCollectors(tcl_commands)
        # create the load steps
        load_step: LoadStep = None
        for load_step in LoadStep.instances:
            load_step_id = str(load_step.get_id())
            # TODO: should be possible to just use a spc collector - not possible rn.
            spc_loadCollector = load_step.spc_loadCollector
            load_loadCollector = load_step.load_loadCollector
            spc_loadCollector_id = str(spc_loadCollector.get_id())
            load_loadCollector_id = str(load_loadCollector.get_id())
            tcl_commands.append(
                f"*createmark loadcols 1 \"{spc_loadCollector.name}\" \"{load_loadCollector.name}\"")
            tcl_commands.append("*createmark outputblocks 1")
            tcl_commands.append("*createmark groups 1")
            tcl_commands.append(f"*loadstepscreate \"loadstep_{load_step_id}\" 1")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 4143 1 1 0 1")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 4709 1 1 0 1")
            tcl_commands.append(
                f"*setvalue loadsteps id={load_step_id} STATUS=2 4059=1 4060=STATICS")
            tcl_commands.append(
                f"*attributeupdateentity loadsteps {load_step_id} 4145 1 1 0 loadcols {spc_loadCollector_id}")
            tcl_commands.append(
                f"*attributeupdateentity loadsteps {load_step_id} 4147 1 1 0 loadcols {load_loadCollector_id}")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 3800 1 1 0 0")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 707 1 1 0 0")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 2396 1 1 0 0")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 8134 1 1 0 0")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 2160 1 1 0 0")
            tcl_commands.append(
                f"*attributeupdateint loadsteps {load_step_id} 10212 1 1 0 0")
137f6361d1e175bc555153af22f77e79ad507096 | 369 | py | Python | dataset/dataset.py | TeamOfProfGuo/few_shot_baseline | f9ac87b9d309fc417589350d3ce61d3612e2be91 | ["MIT"] | null | null | null | dataset/dataset.py | TeamOfProfGuo/few_shot_baseline | f9ac87b9d309fc417589350d3ce61d3612e2be91 | ["MIT"] | null | null | null | dataset/dataset.py | TeamOfProfGuo/few_shot_baseline | f9ac87b9d309fc417589350d3ce61d3612e2be91 | ["MIT"] | null | null | null |
import os

DEFAULT_ROOT = './materials'

datasets_dt = {}


def register(name):
    def decorator(cls):
        datasets_dt[name] = cls
        return cls
    return decorator


def make(name, **kwargs):
    if kwargs.get('root_path') is None:
        kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name)
    dataset = datasets_dt[name](**kwargs)
    return dataset
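Usage sketch of the registry pattern above; the dataset name and class are made up for illustration:

@register('toy-dataset')
class ToyDataset:
    def __init__(self, root_path, split='train'):
        self.root_path = root_path
        self.split = split

ds = make('toy-dataset', split='val')  # root_path defaults to ./materials/toy-dataset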
1383ec6b114d686bf9cab5e588bcd0ec41143a37 | 1,033 | py | Python | dblib/test_lib.py | cyber-fighters/dblib | 9743122a55bc265f7551dd9283f381678b2703e4 | ["MIT"] | null | null | null | dblib/test_lib.py | cyber-fighters/dblib | 9743122a55bc265f7551dd9283f381678b2703e4 | ["MIT"] | 1 | 2019-02-25T09:52:31.000Z | 2019-02-25T09:52:31.000Z | dblib/test_lib.py | cyber-fighters/dblib | 9743122a55bc265f7551dd9283f381678b2703e4 | ["MIT"] | null | null | null |
"""Collection of tests."""
import pytest
import dblib.lib
f0 = dblib.lib.Finding('CD spook', 'my_PC', 'The CD drive is missing.')
f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord is unplugged.')
f2 = dblib.lib.Finding('Monitor switched off', 'my_PC', 'The monitor is switched off.')
def test_add_remove():
"""Test function."""
db = dblib.lib.BackyardDB()
# regular cases
db.add(f0)
assert f0 in db.findings
assert len(db.findings) == 1
db.add(f1)
assert f1 in db.findings
assert len(db.findings) == 2
db.add(f2)
assert f2 in db.findings
assert len(db.findings) == 3
db.add(None)
assert len(db.findings) == 3
db.remove(f1)
assert f1 not in db.findings
assert len(db.findings) == 2
# test exceptions
with pytest.raises(TypeError):
db.add(1)
def test_update():
"""Test function."""
db = dblib.lib.BackyardDB()
db.add(f0)
db.add(f1)
db.update(f1, f2)
assert f2 in db.findings
assert len(db.findings) == 2
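These tests pin down the expected shape of dblib.lib: a Finding value object plus a BackyardDB that silently ignores None, rejects non-Finding arguments with TypeError, and supports add/remove/update. A minimal skeleton consistent with the assertions; the real dblib.lib may differ:

class Finding:
    def __init__(self, title, machine, description):
        self.title, self.machine, self.description = title, machine, description

class BackyardDB:
    def __init__(self):
        self.findings = []

    def add(self, finding):
        if finding is None:
            return  # silently ignored, per test_add_remove
        if not isinstance(finding, Finding):
            raise TypeError('finding must be a Finding instance')
        self.findings.append(finding)

    def remove(self, finding):
        self.findings.remove(finding)

    def update(self, old, new):
        self.remove(old)
        self.add(new)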
138922f3a893ab484911754fbdc916b94b521606 | 1,341 | py | Python | tests/input_files/full_sm_UFO/function_library.py | valassi/mg5amc_test | 2e04f23353051f64e1604b23105fe3faabd32869 | ["NCSA"] | 1 | 2016-07-09T00:05:56.000Z | 2016-07-09T00:05:56.000Z | tests/input_files/full_sm_UFO/function_library.py | valassi/mg5amc_test | 2e04f23353051f64e1604b23105fe3faabd32869 | ["NCSA"] | 4 | 2022-03-10T09:13:31.000Z | 2022-03-30T16:15:01.000Z | tests/input_files/full_sm_UFO/function_library.py | valassi/mg5amc_test | 2e04f23353051f64e1604b23105fe3faabd32869 | ["NCSA"] | 1 | 2016-07-09T00:06:15.000Z | 2016-07-09T00:06:15.000Z |
# This file is part of the UFO.
#
# This file contains definitions for functions that
# are extensions of the cmath library, and correspond
# either to functions that are in cmath, but inconvenient
# to access from there (e.g. z.conjugate()),
# or functions that are simply not defined.
#
from __future__ import absolute_import

__date__ = "22 July 2010"
__author__ = "[email protected]"

import cmath
from .object_library import all_functions, Function

#
# shortcuts for functions from cmath
#

complexconjugate = Function(name = 'complexconjugate',
                            arguments = ('z',),
                            expression = 'z.conjugate()')

re = Function(name = 're',
              arguments = ('z',),
              expression = 'z.real')

im = Function(name = 'im',
              arguments = ('z',),
              expression = 'z.imag')

# New functions (trigonometric)

sec = Function(name = 'sec',
               arguments = ('z',),
               expression = '1./cmath.cos(z)')

asec = Function(name = 'asec',
                arguments = ('z',),
                expression = 'cmath.acos(1./z)')

csc = Function(name = 'csc',
               arguments = ('z',),
               expression = '1./cmath.sin(z)')

acsc = Function(name = 'acsc',
                arguments = ('z',),
                expression = 'cmath.asin(1./z)')
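Each Function above pairs a name with a string expression that is evaluated over cmath later in the UFO pipeline. A rough illustration of how such an expression might be applied, assuming Function keeps its `expression` attribute as shown; the helper itself is hypothetical, not part of the UFO code:

import cmath

def apply_function(func, z):
    # Evaluate a UFO Function's string expression at z (illustrative only).
    return eval(func.expression, {"cmath": cmath, "z": z})

# e.g. apply_function(sec, 0.5) == 1./cmath.cos(0.5)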