function (string, lengths 11-56k) | repo_name (string, lengths 5-60) | features (sequence) |
---|---|---|
def test_safe_search_detection(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(image=DETECT_TEST_IMAGE)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
) | apache/incubator-airflow | [29418, 12032, 29418, 869, 1428948298] |
def test_safe_search_detection_with_additional_properties(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
) | apache/incubator-airflow | [29418, 12032, 29418, 869, 1428948298] |
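Both tests above receive annotator_client_mock from a patch decorator that sits outside these rows. A minimal stand-alone sketch of the same mock pattern using only the standard library; the image value and return payload here are placeholders, not the hook's real types:

from unittest import mock

# Configure a method mock, call it, then assert on the recorded call.
annotator_client_mock = mock.Mock()
annotator_client_mock.safe_search_detection.return_value = {"adult": "VERY_UNLIKELY"}

response = annotator_client_mock.safe_search_detection(
    image="placeholder-image", max_results=None, retry=None, timeout=None)

annotator_client_mock.safe_search_detection.assert_called_once_with(
    image="placeholder-image", max_results=None, retry=None, timeout=None)
print(response)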
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = "{} - Mean Squared Error: {}".format(model_name, mse)
accuracy = "{} - Accuracy score (%): {:.2%}".format(model_name, model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
print("Scores saved location:", filename) | priscillaboyd/SPaT_Prediction | [
11,
2,
11,
2,
1496250432
] |
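A hypothetical call sequence for score_dt, assuming scikit-learn is available and that y_actual carries the model's predictions; the demo data and output folder are made up, and create_folder_if_not_exists plus mean_squared_error come from the module this row was taken from:

import numpy as np
from sklearn.tree import DecisionTreeRegressor

# Train a tiny tree, then write its scores to ./results/models/.
X = np.arange(20, dtype=float).reshape(-1, 1)
y = 2.0 * X.ravel()
model = DecisionTreeRegressor().fit(X, y)
score_dt("dt_demo", model, X, y, model.predict(X), "./results")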
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
with open(model_file, 'wb') as model_output:
    pickle.dump(model, model_output)
print("Model saved location:", model_file) | priscillaboyd/SPaT_Prediction | [
11,
2,
11,
2,
1496250432
] |
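The row above only saves; a load counterpart is not part of it. A minimal sketch of reading the model back with the same path convention:

import pickle

def load_dt_model(model_name, folder):
    """Hypothetical inverse of save_dt_model: unpickle a saved model."""
    model_file = folder + '/models/' + model_name + '.pkl'
    with open(model_file, 'rb') as model_input:
        return pickle.load(model_input)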
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl | openvswitch/ovn-scale-test | [17, 21, 17, 22, 1460439683] |
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon() | openvswitch/ovn-scale-test | [17, 21, 17, 22, 1460439683] |
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush()  # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches | openvswitch/ovn-scale-test | [17, 21, 17, 22, 1460439683] |
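A stand-alone illustration of the start_cidr arithmetic used in the loop above: netaddr's IPNetwork.next(step) returns the subnet of the same size that lies step positions later, which is what yields one CIDR per switch.

import netaddr

start_cidr = netaddr.IPNetwork("10.0.0.0/24")  # example value
for i in range(3):
    print("lswitch_%s" % start_cidr.next(i))
# -> lswitch_10.0.0.0/24, lswitch_10.0.1.0/24, lswitch_10.0.2.0/24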
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush() | openvswitch/ovn-scale-test | [17, 21, 17, 22, 1460439683] |
def __init__(self, conf, topic_name):
self.topic_name = topic_name
self.producer = Producer(conf)
self.counter = 0
self.running = True | ibm-messaging/message-hub-samples | [92, 163, 92, 5, 1444758625] |
def on_delivery(self, err, msg):
if err:
print('Delivery report: Failed sending message {0}'.format(msg.value()))
print(err)
# We could retry sending the message
else:
print('Message produced, offset: {0}'.format(msg.offset())) | ibm-messaging/message-hub-samples | [92, 163, 92, 5, 1444758625] |
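A hedged sketch of the send loop this class and callback are meant for, assuming the confluent_kafka package (which matches the Producer(conf) constructor above); the broker address and topic are placeholders:

from confluent_kafka import Producer

def on_delivery(err, msg):
    # Module-level stand-in for the method above.
    if err:
        print('Delivery failed: {0}'.format(err))
    else:
        print('Message produced, offset: {0}'.format(msg.offset()))

producer = Producer({'bootstrap.servers': 'localhost:9092'})
for i in range(3):
    producer.produce('test-topic', value='message {0}'.format(i),
                     callback=on_delivery)
    producer.poll(0)   # serve delivery callbacks
producer.flush()       # block until all messages are delivered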
def __init__(self):
super(TestAppModelSimple, self).__init__()
self.my_key = ""
self.stringField = "" | icoxfog417/pykintone | [
21,
11,
21,
8,
1435900514
] |
def __init__(self, target=None, listeners=None, name=None, service=None, cleanup_method=None,
heartbeat_secs=10, **kwargs):
"""
Constructs the control part of an ION process.
Used by the container's IonProcessThreadManager, as part of spawn_process.
@param target A callable to run in the PyonThread. If None (typical), will use the target method
defined in this class.
@param listeners A list of listening endpoints attached to this thread.
@param name The name of this ION process.
@param service An instance of the BaseService derived class which contains the business logic for
the ION process.
@param cleanup_method An optional callable to run when the process is stopping. Runs after all other
notify_stop calls have run. Should take one param, this instance.
@param heartbeat_secs Number of seconds to wait in between heartbeats.
"""
self._startup_listeners = listeners or []
self.listeners = []
self._listener_map = {}
self.name = name
self.service = service
self._cleanup_method = cleanup_method
self.thread_manager = ThreadManager(failure_notify_callback=self._child_failed) # bubbles up to main thread manager
self._dead_children = [] # save any dead children for forensics
self._ctrl_thread = None
self._ctrl_queue = Queue()
self._ready_control = Event()
self._errors = []
self._ctrl_current = None # set to the AR generated by _routing_call when in the context of a call
# processing vs idle time (ms)
self._start_time = None
self._proc_time = 0 # busy time since start
self._proc_time_prior = 0 # busy time at the beginning of the prior interval
self._proc_time_prior2 = 0 # busy time at the beginning of two intervals ago
self._proc_interval_num = 0 # interval num of last record
# for heartbeats, used to detect stuck processes
self._heartbeat_secs = heartbeat_secs # amount of time to wait between heartbeats
self._heartbeat_stack = None # stacktrace of last heartbeat
self._heartbeat_time = None # timestamp of heart beat last matching the current op
self._heartbeat_op = None # last operation (by AR)
self._heartbeat_count = 0 # number of times this operation has been seen consecutively
self._log_call_exception = CFG.get_safe("container.process.log_exceptions", False)
self._log_call_dbstats = CFG.get_safe("container.process.log_dbstats", False)
self._warn_call_dbstmt_threshold = CFG.get_safe("container.process.warn_dbstmt_threshold", 0)
PyonThread.__init__(self, target=target, **kwargs) | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def time_stats(self):
"""
Returns a 5-tuple of (total time, idle time, processing time, time since prior interval start,
busy since prior interval start), all in ms (int).
"""
now = get_ion_ts_millis()
running_time = now - self._start_time
idle_time = running_time - self._proc_time
cur_interval = now / STAT_INTERVAL_LENGTH
now_since_prior = now - (cur_interval - 1) * STAT_INTERVAL_LENGTH
if cur_interval == self._proc_interval_num:
proc_time_since_prior = self._proc_time-self._proc_time_prior2
elif cur_interval-1 == self._proc_interval_num:
proc_time_since_prior = self._proc_time-self._proc_time_prior
else:
proc_time_since_prior = 0
return (running_time, idle_time, self._proc_time, now_since_prior, proc_time_since_prior) | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
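An illustrative read of the time_stats() tuple; the numbers are invented to show how utilization figures can be derived from it:

running_time, idle_time, proc_time, now_since_prior, busy_since_prior = (
    10000, 7500, 2500, 4000, 900)  # example values in ms

overall_busy_pct = 100.0 * proc_time / running_time
interval_busy_pct = 100.0 * busy_since_prior / now_since_prior
print("%.1f%% busy overall, %.1f%% busy since prior interval"
      % (overall_busy_pct, interval_busy_pct))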
def add_endpoint(self, listener, activate=True):
"""
Adds a listening endpoint to be managed by this ION process.
Spawns the listen loop and sets the routing call to synchronize incoming messages
here. If this process hasn't been started yet, adds it to the list of listeners
to start on startup.
@param activate If True (default), start consuming from listener
"""
if self.proc:
listener.routing_call = self._routing_call
if self.name:
svc_name = "unnamed-service"
if self.service is not None and hasattr(self.service, 'name'):
svc_name = self.service.name
listen_thread_name = "%s-%s-listen-%s" % (svc_name, self.name, len(self.listeners)+1)
else:
listen_thread_name = "unknown-listener-%s" % (len(self.listeners)+1)
listen_thread = self.thread_manager.spawn(listener.listen, thread_name=listen_thread_name, activate=activate)
listen_thread.proc._glname = "ION Proc listener %s" % listen_thread_name
self._listener_map[listener] = listen_thread
self.listeners.append(listener)
else:
self._startup_listeners.append(listener) | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def target(self, *args, **kwargs):
"""
Entry point for the main process greenlet.
Setup the base properties for this process (mainly the control thread).
"""
if self.name:
threading.current_thread().name = "%s-target" % self.name
# start time
self._start_time = get_ion_ts_millis()
self._proc_interval_num = self._start_time / STAT_INTERVAL_LENGTH
# spawn control flow loop
self._ctrl_thread = self.thread_manager.spawn(self._control_flow)
self._ctrl_thread.proc._glname = "ION Proc CL %s" % self.name
# wait on control flow loop, heartbeating as appropriate
while not self._ctrl_thread.ev_exit.wait(timeout=self._heartbeat_secs):
hbst = self.heartbeat()
if not all(hbst):
log.warn("Heartbeat status for process %s returned %s", self, hbst)
if self._heartbeat_stack is not None:
stack_out = "".join(traceback.format_list(self._heartbeat_stack))
else:
stack_out = "N/A"
#raise PyonHeartbeatError("Heartbeat failed: %s, stacktrace:\n%s" % (hbst, stack_out))
log.warn("Heartbeat failed: %s, stacktrace:\n%s", hbst, stack_out)
# this is almost a no-op as we don't fall out of the above loop without
# exiting the ctrl_thread, but having this line here makes testing much easier.
self._ctrl_thread.join() | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def has_pending_call(self, ar):
"""
Returns true if the call (keyed by the AsyncResult returned by _routing_call) is still pending.
"""
for _, qar, _, _, _, _ in self._ctrl_queue.queue:
if qar == ar:
return True
return False | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def _interrupt_control_thread(self):
"""
Signal the control flow thread that it needs to abort processing, likely due to a timeout.
"""
self._ctrl_thread.proc.kill(exception=OperationInterruptedException, block=False) | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def _control_flow(self):
"""
Entry point for process control thread of execution.
This method is run by the control greenlet for each ION process. Listeners attached
to the process, either RPC Servers or Subscribers, synchronize calls to the process
by placing call requests into the queue by calling _routing_call.
This method blocks until there are calls to be made in the synchronized queue, and
then calls from within this greenlet. Any exception raised is caught and re-raised
in the greenlet that originally scheduled the call. If successful, the AsyncResult
created at scheduling time is set with the result of the call.
"""
svc_name = getattr(self.service, "name", "unnamed-service") if self.service else "unnamed-service"
proc_id = getattr(self.service, "id", "unknown-pid") if self.service else "unknown-pid"
if self.name:
threading.current_thread().name = "%s-%s" % (svc_name, self.name)
thread_base_name = threading.current_thread().name
self._ready_control.set()
for calltuple in self._ctrl_queue:
calling_gl, ar, call, callargs, callkwargs, context = calltuple
request_id = (context or {}).get("request-id", None)
if request_id:
threading.current_thread().name = thread_base_name + "-" + str(request_id)
#log.debug("control_flow making call: %s %s %s (has context: %s)", call, callargs, callkwargs, context is not None)
res = None
start_proc_time = get_ion_ts_millis()
self._record_proc_time(start_proc_time)
# check context for expiration
if context is not None and 'reply-by' in context:
if start_proc_time >= int(context['reply-by']):
log.info("control_flow: attempting to process message already exceeding reply-by, ignore")
# raise a timeout in the calling thread to allow endpoints to continue processing
e = IonTimeout("Reply-by time has already occurred (reply-by: %s, op start time: %s)" % (context['reply-by'], start_proc_time))
calling_gl.kill(exception=e, block=False)
continue
# If ar is set, means it is cancelled
if ar.ready():
log.info("control_flow: attempting to process message that has been cancelled, ignore")
continue
init_db_stats()
try:
# ******************************************************************
# ****** THIS IS WHERE THE RPC OPERATION/SERVICE CALL IS MADE ******
with self.service.push_context(context), \
self.service.container.context.push_context(context):
self._ctrl_current = ar
res = call(*callargs, **callkwargs)
# ****** END CALL, EXCEPTION HANDLING FOLLOWS ******
# ******************************************************************
except OperationInterruptedException:
# endpoint layer takes care of response as it's the one that caused this
log.debug("Operation interrupted")
except Exception as e:
if self._log_call_exception:
log.exception("PROCESS exception: %s" % e.message)
# Raise the exception in the calling greenlet.
# Try decorating the args of the exception with the true traceback -
# this should be reported by ThreadManager._child_failed
exc = PyonThreadTraceback("IonProcessThread _control_flow caught an exception "
"(call: %s, *args %s, **kwargs %s, context %s)\n"
"True traceback captured by IonProcessThread' _control_flow:\n\n%s" % (
call, callargs, callkwargs, context, traceback.format_exc()))
e.args = e.args + (exc,)
if isinstance(e, (TypeError, IonException)):
# Pass through known process exceptions, in particular IonException
calling_gl.kill(exception=e, block=False)
else:
# Otherwise, wrap unknown, forward and hopefully we can continue on our way
self._errors.append((call, callargs, callkwargs, context, e, exc))
log.warn(exc)
log.warn("Attempting to continue...")
# Note: Too large exception string will crash the container (when passed on as msg header).
exception_str = str(exc)
if len(exception_str) > 10000:
exception_str = (
"Exception string representation too large. "
"Begin and end of the exception:\n"
+ exception_str[:2000] + "\n...\n" + exception_str[-2000:]
)
calling_gl.kill(exception=ContainerError(exception_str), block=False)
finally:
try:
# Compute statistics
self._compute_proc_stats(start_proc_time)
db_stats = get_db_stats()
if db_stats:
if self._warn_call_dbstmt_threshold > 0 and db_stats.get("count.all", 0) >= self._warn_call_dbstmt_threshold:
stats_str = ", ".join("{}={}".format(k, db_stats[k]) for k in sorted(db_stats.keys()))
log.warn("PROC_OP '%s.%s' EXCEEDED DB THRESHOLD. stats=%s", svc_name, call.__name__, stats_str)
elif self._log_call_dbstats:
stats_str = ", ".join("{}={}".format(k, db_stats[k]) for k in sorted(db_stats.keys()))
log.info("PROC_OP '%s.%s' DB STATS: %s", svc_name, call.__name__, stats_str)
clear_db_stats()
if stats_callback:
stats_callback(proc_id=proc_id, proc_name=self.name, svc=svc_name, op=call.__name__,
request_id=request_id, context=context,
db_stats=db_stats, proc_stats=self.time_stats, result=res, exc=None)
except Exception:
log.exception("Error computing process call stats")
self._ctrl_current = None
threading.current_thread().name = thread_base_name
# Set response in AsyncEvent of caller (endpoint greenlet)
ar.set(res) | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
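_control_flow consumes 6-tuples from _ctrl_queue and sets an AsyncResult when done; the producer side is not part of this row. A hedged sketch of what a matching _routing_call could look like under gevent (not the verbatim method), with the tuple layout taken from the unpack above:

import gevent
from gevent.event import AsyncResult

def _routing_call(self, call, context, *callargs, **callkwargs):
    # Queue the call for _control_flow and hand the caller an
    # AsyncResult to wait on.
    ar = AsyncResult()
    self._ctrl_queue.put((gevent.getcurrent(), ar, call,
                          callargs, callkwargs, context))
    return ar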
def _compute_proc_stats(self, start_proc_time):
cur_time = get_ion_ts_millis()
self._record_proc_time(cur_time)
proc_time = cur_time - start_proc_time
self._proc_time += proc_time | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def _notify_stop(self):
"""
Called when the process is about to be shut down.
Instructs all listeners to close, puts a StopIteration into the synchronized queue,
and waits for the listeners to close and for the control queue to exit.
"""
for listener in self.listeners:
try:
listener.close()
except Exception as ex:
tb = traceback.format_exc()
log.warn("Could not close listener, attempting to ignore: %s\nTraceback:\n%s", ex, tb)
self._ctrl_queue.put(StopIteration)
# wait_children will join them and then get() them, which may raise an exception if any of them
# died with an exception.
self.thread_manager.wait_children(30)
PyonThread._notify_stop(self)
# run the cleanup method if we have one
if self._cleanup_method is not None:
try:
self._cleanup_method(self)
except Exception as ex:
log.warn("Cleanup method error, attempting to ignore: %s\nTraceback: %s", ex, traceback.format_exc()) | scionrep/scioncc | [
3,
10,
3,
1,
1435685091
] |
def _create_thread(self, target=None, **kwargs):
return IonProcessThread(target=target, heartbeat_secs=self.heartbeat_secs, **kwargs) | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def call_process(self, message, stream_route, stream_id):
"""
Handles pre-processing of packet and process work
"""
self.process(message) | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def get_ion_actor_id(process):
"""Given an ION process, return the ion-actor-id from the context, if set and present"""
ion_actor_id = None
if process:
ctx = process.get_context()
ion_actor_id = ctx.get(MSG_HEADER_ACTOR, None) if ctx else None
return ion_actor_id | scionrep/scioncc | [3, 10, 3, 1, 1435685091] |
def cmd_list(self, argument):
"""List commands"""
arg = argument.lower()
index = self.bot.help_index
public = "public commands -- %s" % " ".join(index['public'])
private = "private commands -- %s" % " ".join(index['private'])
if 'all' in arg or 'both' in arg:
output = "\n".join((public, private))
elif 'pub' in arg or self.target.startswith('#'):
output = public
elif 'priv' in arg or not self.target.startswith('#'):
output = private
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
self.send(self.target, output) | nickraptis/fidibot | [1, 2, 1, 2, 1367162213] |
def cmd_help(self, argument):
"""Get help on a command or module"""
arg = argument.lower()
index = self.bot.help_index
target = self.target
args = arg.split()
if not args:
s = "usage: help <command> [public|private] / help module <module>"
self.send(target, s)
elif args[0] == 'module':
args.pop(0)
if not args:
self.send(target, "usage: help module <module>")
else:
help_item = index['modules'].get(args[0])
if help_item:
self.send(target, help_item['summary'])
else:
self.send(target, _("No help for %s"), args[0])
else:
args.append("")
cmd = args.pop(0)
cmd_type = args.pop(0)
if 'pu' in cmd_type or self.target.startswith('#'):
cmd_type = 'public'
elif 'pr' in cmd_type or not self.target.startswith('#'):
cmd_type = 'private'
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
help_item = index[cmd_type].get(cmd)
if help_item:
self.send(target, index[cmd_type][cmd]['summary'])
else:
self.send(target, _("No help for %s"), cmd) | nickraptis/fidibot | [
1,
2,
1,
2,
1367162213
] |
def test_column_list_select2(self):
# make sure SDC copies the columns like Pandas does
def test_impl(df):
df2 = df[['A']]
df2['A'] += 10
return df2.A, df.A
hpat_func = self.jit(test_impl)
n = 11
df = pd.DataFrame(
{'A': np.arange(n), 'B': np.ones(n), 'C': np.random.ranf(n)})
np.testing.assert_array_equal(hpat_func(df.copy())[1], test_impl(df)[1]) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_pd_DataFrame_from_series_par(self):
def test_impl(n):
S1 = pd.Series(np.ones(n))
S2 = pd.Series(np.random.ranf(n))
df = pd.DataFrame({'A': S1, 'B': S2})
return df.A.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
self.assertEqual(count_parfor_OneDs(), 1) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_getitem_bool_series(self):
def test_impl(df):
return df['A'][df['B']].values
hpat_func = self.jit(test_impl)
df = pd.DataFrame({'A': [1, 2, 3], 'B': [True, False, True]})
np.testing.assert_array_equal(test_impl(df), hpat_func(df)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_fillna(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
B = df.A.fillna(5.0)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl()) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_fillna_inplace(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
df.A.fillna(5.0, inplace=True)
return df.A.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl()) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_column_mean(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
return df.A.mean()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl()) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_column_var(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = 4.0
df = pd.DataFrame({'A': A})
return df.A.var()
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(), test_impl()) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_column_std(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = 4.0
df = pd.DataFrame({'A': A})
return df.A.std()
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(), test_impl()) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_column_map(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n)})
df['B'] = df.A.map(lambda a: 2 * a)
return df.B.sum()
n = 121
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_column_map_arg(self):
def test_impl(df):
df['B'] = df.A.map(lambda a: 2 * a)
return
n = 121
df1 = pd.DataFrame({'A': np.arange(n)})
df2 = pd.DataFrame({'A': np.arange(n)})
hpat_func = self.jit(test_impl)
hpat_func(df1)
self.assertTrue(hasattr(df1, 'B'))
test_impl(df2)
np.testing.assert_equal(df1.B.values, df2.B.values) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_cumsum(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
Ac = df.A.cumsum()
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_array_OneDs(), 2)
self.assertEqual(count_parfor_REPs(), 0)
self.assertEqual(count_parfor_OneDs(), 2)
self.assertTrue(dist_IR_contains('dist_cumsum')) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_column_distribution(self):
# make sure all column calls are distributed
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
df.A.fillna(5.0, inplace=True)
DF = df.A.fillna(5.0)
s = DF.sum()
m = df.A.mean()
v = df.A.var()
t = df.A.std()
Ac = df.A.cumsum()
return Ac.sum() + s + m + v + t
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
self.assertTrue(dist_IR_contains('dist_cumsum')) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_quantile_parallel(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float64)})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_quantile_parallel_float_nan(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float32)})
df.A[0:100] = np.nan
df.A[200:331] = np.nan
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_quantile_parallel_int(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.int32)})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_quantile_sequential(self):
def test_impl(A):
df = pd.DataFrame({'A': A})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
A = np.arange(0, n, 1, np.float64)
np.testing.assert_almost_equal(hpat_func(A), test_impl(A)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_nunique(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n)})
df.A[2] = 0
return df.A.nunique()
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_nunique_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return df.four.nunique()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_nunique_str(self):
def test_impl(n):
df = pd.DataFrame({'A': ['aa', 'bb', 'aa', 'cc', 'cc']})
return df.A.nunique()
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_nunique_str_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return df.two.nunique()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_unique_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return (df.four.unique() == 3.0).sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_unique_str_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return (df.two.unique() == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_describe(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float64)})
return df.A.describe()
hpat_func = self.jit(test_impl)
n = 1001
hpat_func(n)
# XXX: test actual output
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_contains_regex(self):
def test_impl():
A = StringArray(['ABC', 'BB', 'ADEF'])
df = pd.DataFrame({'A': A})
B = df.A.str.contains('AB*', regex=True)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), 2) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_contains_noregex(self):
def test_impl():
A = StringArray(['ABC', 'BB', 'ADEF'])
df = pd.DataFrame({'A': A})
B = df.A.str.contains('BB', regex=False)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), 1) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_replace_regex(self):
def test_impl(df):
return df.A.str.replace('AB*', 'EE', regex=True)
df = pd.DataFrame({'A': ['ABCC', 'CABBD']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_replace_noregex(self):
def test_impl(df):
return df.A.str.replace('AB', 'EE', regex=False)
df = pd.DataFrame({'A': ['ABCC', 'CABBD']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_replace_regex_parallel(self):
def test_impl(df):
B = df.A.str.replace('AB*', 'EE', regex=True)
return B
n = 5
A = ['ABCC', 'CABBD', 'CCD', 'CCDAABB', 'ED']
start, end = get_start_end(n)
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split(self):
def test_impl(df):
return df.A.str.split(',')
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D', 'G', '', 'g,f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split_default(self):
def test_impl(df):
return df.A.str.split()
df = pd.DataFrame({'A': ['AB CC', 'C ABB D', 'G ', ' ', 'g\t f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split_filter(self):
def test_impl(df):
B = df.A.str.split(',')
df2 = pd.DataFrame({'B': B})
return df2[df2.B.str.len() > 1]
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D', 'G', '', 'g,f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_frame_equal(
hpat_func(df), test_impl(df).reset_index(drop=True)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split_box_df(self):
def test_impl(df):
return pd.DataFrame({'B': df.A.str.split(',')})
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df).B, test_impl(df).B, check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split_unbox_df(self):
def test_impl(df):
return df.A.iloc[0]
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
df2 = pd.DataFrame({'A': df.A.str.split(',')})
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(df2), test_impl(df2)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split_bool_index(self):
def test_impl(df):
C = df.A.str.split(',')
return C[df.B == 'aa']
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D'], 'B': ['aa', 'bb']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split_parallel(self):
def test_impl(df):
B = df.A.str.split(',')
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_get(self):
def test_impl(df):
B = df.A.str.split(',')
return B.str.get(1)
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_split(self):
def test_impl(df):
return df.A.str.split(',')
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_get_parallel(self):
def test_impl(df):
A = df.A.str.split(',')
B = A.str.get(1)
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD,F', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_get_to_numeric(self):
def test_impl(df):
B = df.A.str.split(',')
C = pd.to_numeric(B.str.get(1), errors='coerce')
return C
df = pd.DataFrame({'A': ['AB,12', 'C,321,D']})
hpat_func = self.jit(locals={'C': types.int64[:]})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_flatten(self):
def test_impl(df):
A = df.A.str.split(',')
return pd.Series(list(itertools.chain(*A)))
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_str_flatten_parallel(self):
def test_impl(df):
A = df.A.str.split(',')
B = pd.Series(list(itertools.chain(*A)))
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_to_numeric(self):
def test_impl(df):
B = pd.to_numeric(df.A, errors='coerce')
return B
df = pd.DataFrame({'A': ['123.1', '331.2']})
hpat_func = self.jit(locals={'B': types.float64[:]})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_1D_Var_len(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n) + 1.0})
df1 = df[df.A > 5]
return len(df1.B)
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_rolling1(self):
# size 3 without unroll
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n), 'B': np.random.ranf(n)})
Ac = df.A.rolling(3).sum()
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
# size 7 with unroll
def test_impl_2(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.rolling(7).sum()
return Ac.sum()
hpat_func = self.jit(test_impl_2)
n = 121
self.assertEqual(hpat_func(n), test_impl_2(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_rolling2(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
df['moving average'] = df.A.rolling(window=5, center=True).mean()
return df['moving average'].sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_rolling3(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
Ac = df.A.rolling(3, center=True).apply(lambda a: a[0] + 2 * a[1] + a[2])
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_shift1(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.shift(1)
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_shift2(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.pct_change(1)
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_df_input(self):
def test_impl(df):
return df.B.sum()
n = 121
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(df), test_impl(df)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_df_input2(self):
def test_impl(df):
C = df.B == 'two'
return C.sum()
n = 11
df = pd.DataFrame({'A': np.random.ranf(3 * n), 'B': ['one', 'two', 'three'] * n})
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(df), test_impl(df)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_df_input_dist1(self):
def test_impl(df):
return df.B.sum()
A = [3, 4, 5, 6, 1]
B = [5, 6, 2, 1, 3]
n = 5
start, end = get_start_end(n)
df = pd.DataFrame({'A': A, 'B': B})
df_h = pd.DataFrame({'A': A[start:end], 'B': B[start:end]})
hpat_func = self.jit(distributed={'df'})(test_impl)
np.testing.assert_almost_equal(hpat_func(df_h), test_impl(df))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_concat(self):
def test_impl(n):
df1 = pd.DataFrame({'key1': np.arange(n), 'A': np.arange(n) + 1.0})
df2 = pd.DataFrame({'key2': n - np.arange(n), 'A': n + np.arange(n) + 1.0})
df3 = pd.concat([df1, df2])
return df3.A.sum() + df3.key2.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
n = 11111
self.assertEqual(hpat_func(n), test_impl(n)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_concat_str(self):
def test_impl():
df1 = pq.read_table('example.parquet').to_pandas()
df2 = pq.read_table('example.parquet').to_pandas()
A3 = pd.concat([df1, df2])
return (A3.two == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_concat_series(self):
def test_impl(n):
df1 = pd.DataFrame({'key1': np.arange(n), 'A': np.arange(n) + 1.0})
df2 = pd.DataFrame({'key2': n - np.arange(n), 'A': n + np.arange(n) + 1.0})
A3 = pd.concat([df1.A, df2.A])
return A3.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
n = 11111
self.assertEqual(hpat_func(n), test_impl(n)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_concat_series_str(self):
def test_impl():
df1 = pq.read_table('example.parquet').to_pandas()
df2 = pq.read_table('example.parquet').to_pandas()
A3 = pd.concat([df1.two, df2.two])
return (A3 == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_intraday(self):
def test_impl(nsyms):
max_num_days = 100
all_res = 0.0
for i in sdc.prange(nsyms):
s_open = 20 * np.ones(max_num_days)
s_low = 28 * np.ones(max_num_days)
s_close = 19 * np.ones(max_num_days)
df = pd.DataFrame({'Open': s_open, 'Low': s_low, 'Close': s_close})
df['Stdev'] = df['Close'].rolling(window=90).std()
df['Moving Average'] = df['Close'].rolling(window=20).mean()
df['Criteria1'] = (df['Open'] - df['Low'].shift(1)) < -df['Stdev']
df['Criteria2'] = df['Open'] > df['Moving Average']
df['BUY'] = df['Criteria1'] & df['Criteria2']
df['Pct Change'] = (df['Close'] - df['Open']) / df['Open']
df['Rets'] = df['Pct Change'][df['BUY']]
all_res += df['Rets'].mean()
return all_res
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_OneDs(), 0)
self.assertEqual(count_parfor_OneDs(), 1) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def test_var_dist1(self):
def test_impl(A, B):
df = pd.DataFrame({'A': A, 'B': B})
df2 = df.groupby('A', as_index=False)['B'].sum()
# TODO: fix handling of df setitem to force match of array dists
# probably with a new node that is appended to the end of basic block
# df2['C'] = np.full(len(df2.B), 3, np.int8)
# TODO: full_like for Series
df2['C'] = np.full_like(df2.B.values, 3, np.int8)
return df2
A = np.array([1, 1, 2, 3])
B = np.array([3, 4, 5, 6])
hpat_func = self.jit(locals={'A:input': 'distributed',
'B:input': 'distributed', 'df2:return': 'distributed'})(test_impl)
start, end = get_start_end(len(A))
df2 = hpat_func(A[start:end], B[start:end])
# TODO:
# pd.testing.assert_frame_equal(
# hpat_func(A[start:end], B[start:end]), test_impl(A, B)) | IntelLabs/hpat | [645, 65, 645, 54, 1496336381] |
def _create_3d_axis():
"""creates a subplot with 3d projection if one does not already exist"""
from matplotlib.projections import get_projection_class
from matplotlib import _pylab_helpers
create_axis = True
if _pylab_helpers.Gcf.get_active() is not None:
if isinstance(plt.gca(), get_projection_class('3d')):
create_axis = False
if create_axis:
plt.figure()
plt.subplot(111, projection='3d') | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
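Minimal use of _create_3d_axis(), assuming matplotlib.pyplot is imported as plt as in the rest of these functions:

import matplotlib.pyplot as plt

_create_3d_axis()
ax = plt.gca()
ax.plot([0, 1], [0, 1], [0, 1])  # three-argument plot works on the 3d axes
plt.show()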
def __phasor_plot(ax, up, idq, uxdq):
uref = max(up, uxdq[0])
uxd = uxdq[0]/uref
uxq = uxdq[1]/uref
u1d, u1q = (uxd, 1+uxq)
u1 = np.sqrt(u1d**2 + u1q**2)*uref
i1 = np.linalg.norm(idq)
i1d, i1q = (idq[0]/i1, idq[1]/i1)
qhw = 6 # width arrow head
qhl = 15 # length arrow head
qlw = 2 # line width
qts = 10 # textsize
# Length of the current adjusted to Ud: initially 0.9, Maier(Oswald) = 0.5
curfac = max(0.9, 1.5*i1q/up)
def label_line(ax, X, Y, U, V, label, color='k', size=8):
"""Add a label to a line, at the proper angle.
Arguments
---------
line : matplotlib.lines.Line2D object,
label : str
x : float
x-position to place center of text (in data coordinated
y : float
y-position to place center of text (in data coordinates)
color : str
size : float
"""
x1, x2 = X, X + U
y1, y2 = Y, Y + V
if y2 == 0:
y2 = y1
if x2 == 0:
x2 = x1
x = (x1 + x2) / 2
y = (y1 + y2) / 2
slope_degrees = np.rad2deg(np.angle(U + V * 1j))
if slope_degrees < 0:
slope_degrees += 180
if 90 < slope_degrees <= 270:
slope_degrees += 180
x_offset = np.sin(np.deg2rad(slope_degrees))
y_offset = np.cos(np.deg2rad(slope_degrees))
bbox_props = dict(boxstyle="Round4, pad=0.1", fc="white", lw=0)
text = ax.annotate(label, xy=(x, y), xytext=(x_offset * 10, y_offset * 8),
textcoords='offset points',
size=size, color=color,
horizontalalignment='center',
verticalalignment='center',
fontfamily='monospace', fontweight='bold', bbox=bbox_props)
text.set_rotation(slope_degrees)
return text
if ax == 0:
ax = plt.gca()
ax.axes.xaxis.set_ticklabels([])
ax.axes.yaxis.set_ticklabels([])
# ax.set_aspect('equal')
ax.set_title(
r'$U_1$={0} V, $I_1$={1} A, $U_p$={2} V'.format(
round(u1, 1), round(i1, 1), round(up, 1)), fontsize=14)
up /= uref
ax.quiver(0, 0, 0, up, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw/2, headlength=qhl/2, headaxislength=qhl/2, width=qlw*2, color='k')
label_line(ax, 0, 0, 0, up, '$U_p$', 'k', qts)
ax.quiver(0, 0, u1d, u1q, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='r')
label_line(ax, 0, 0, u1d, u1q, '$U_1$', 'r', qts)
ax.quiver(0, 1, uxd, 0, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='g')
label_line(ax, 0, 1, uxd, 0, '$U_d$', 'g', qts)
ax.quiver(uxd, 1, 0, uxq, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='g')
label_line(ax, uxd, 1, 0, uxq, '$U_q$', 'g', qts)
ax.quiver(0, 0, curfac*i1d, curfac*i1q, angles='xy', scale_units='xy', scale=1,
units='dots', headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='b')
label_line(ax, 0, 0, curfac*i1d, curfac*i1q, '$I_1$', 'b', qts)
xmin, xmax = (min(0, uxd, i1d), max(0, i1d, uxd))
ymin, ymax = (min(0, i1q, 1-uxq), max(1, i1q, 1+uxq))
ax.set_xlim([xmin-0.1, xmax+0.1])
ax.set_ylim([ymin-0.1, ymax+0.1])
ax.grid(True) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def iqd_phasor(up, iqd, uqd, ax=0):
"""creates a phasor plot
up: internal voltage
iqd: current
uqd: terminal voltage"""
uxdq = (uqd[1]/np.sqrt(2), (uqd[0]/np.sqrt(2)-up))
__phasor_plot(ax, up, (iqd[1]/np.sqrt(2), iqd[0]/np.sqrt(2)), uxdq) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def airgap(airgap, ax=0):
"""creates plot of flux density in airgap"""
if ax == 0:
ax = plt.gca()
ax.set_title('Airgap Flux Density [T]')
ax.plot(airgap['pos'], airgap['B'],
label='Max {:4.2f} T'.format(max(airgap['B'])))
ax.plot(airgap['pos'], airgap['B_fft'],
label='Base Ampl {:4.2f} T'.format(airgap['Bamp']))
ax.set_xlabel('Position/°')
ax.legend()
ax.grid(True) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
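A hypothetical input for airgap(); the dict keys match the ones the function reads, with made-up flux values:

import numpy as np
import matplotlib.pyplot as plt

pos = np.linspace(0, 360, 361)
demo = {'pos': pos,
        'B': 0.9 * np.sin(np.radians(pos)),
        'B_fft': 0.85 * np.sin(np.radians(pos)),
        'Bamp': 0.85}
airgap(demo)
plt.show()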
def torque(pos, torque, ax=0):
"""creates plot from torque vs position"""
k = 20
alpha = np.linspace(pos[0], pos[-1],
k*len(torque))
f = ip.interp1d(pos, torque, kind='quadratic')
unit = 'Nm'
scale = 1
if np.min(torque) < -9.9e3 or np.max(torque) > 9.9e3:
scale = 1e-3
unit = 'kNm'
if ax == 0:
ax = plt.gca()
ax.set_title('Torque / {}'.format(unit))
ax.grid(True)
ax.plot(pos, [scale*t for t in torque], 'go')
ax.plot(alpha, scale*f(alpha))
if np.min(torque) > 0 and np.max(torque) > 0:
ax.set_ylim(bottom=0)
elif np.min(torque) < 0 and np.max(torque) < 0:
ax.set_ylim(top=0) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def force(title, pos, force, xlabel='', ax=0):
"""plot force vs position"""
unit = 'N'
scale = 1
if min(force) < -9.9e3 or max(force) > 9.9e3:
scale = 1e-3
unit = 'kN'
if ax == 0:
ax = plt.gca()
ax.set_title('{} / {}'.format(title, unit))
ax.grid(True)
ax.plot(pos, [scale*f for f in force])
if xlabel:
ax.set_xlabel(xlabel)
if min(force) > 0:
ax.set_ylim(bottom=0) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def forcedens(title, pos, fdens, ax=0):
"""plot force densities"""
if ax == 0:
ax = plt.gca()
ax.set_title(title)
ax.grid(True)
ax.plot(pos, [1e-3*ft for ft in fdens[0]], label='F tang')
ax.plot(pos, [1e-3*fn for fn in fdens[1]], label='F norm')
ax.legend()
ax.set_xlabel('Pos / deg')
ax.set_ylabel('Force Density / kN/m²') | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def forcedens_fft(title, fdens, ax=0):
"""plot force densities FFT
Args:
title: plot title
fdens: force density object
"""
if ax == 0:
ax = plt.axes(projection="3d")
F = 1e-3*fdens.fft()
fmin = 0.2
num_bars = F.shape[0] + 1
_xx, _yy = np.meshgrid(np.arange(1, num_bars),
np.arange(1, num_bars))
z_size = F[F > fmin]
x_pos, y_pos = _xx[F > fmin], _yy[F > fmin]
z_pos = np.zeros_like(z_size)
x_size = 2
y_size = 2
ax.bar3d(x_pos, y_pos, z_pos, x_size, y_size, z_size)
ax.view_init(azim=120)
ax.set_xlim(0, num_bars+1)
ax.set_ylim(0, num_bars+1)
ax.set_title(title)
ax.set_xlabel('M')
ax.set_ylabel('N')
ax.set_zlabel('kN/m²') | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def winding_current(pos, current, ax=0):
"""plot winding currents"""
if ax == 0:
ax = plt.gca()
ax.set_title('Winding Currents / A')
ax.grid(True)
for p, i in zip(pos, current):
ax.plot(p, i) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def voltage_fft(title, order, voltage, ax=0):
"""plot FFT harmonics of voltage"""
if ax == 0:
ax = plt.gca()
ax.set_title('{} / V'.format(title))
ax.grid(True)
if max(order) < 5:
order += [5]
voltage += [0]
try:
bw = 2.5E-2*max(order)
ax.bar(order, voltage, width=bw, align='center')
except ValueError: # empty sequence
pass | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def mcv_muer(mcv, ax=0):
"""plot rel. permeability vs. B of mcv dict"""
MUE0 = 4e-7*np.pi
bi, ur = zip(*[(bx, bx/hx/MUE0)
for bx, hx in zip(mcv['curve'][0]['bi'],
mcv['curve'][0]['hi']) if not hx == 0])
if ax == 0:
ax = plt.gca()
ax.plot(bi, ur)
ax.set_xlabel('B / T')
ax.set_title('rel. Permeability')
ax.grid() | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
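A hypothetical mcv dict with the nesting mcv_muer() expects; the B/H points are invented, and the hx == 0 entry is filtered out inside the function:

import matplotlib.pyplot as plt

mcv = {'curve': [{'bi': [0.0, 0.5, 1.0, 1.5],
                  'hi': [0.0, 120.0, 300.0, 2000.0]}]}
mcv_muer(mcv)
plt.show()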
def mtpv(pmrel, u1max, i1max, title='', projection='', ax=0):
"""create a line or surface plot with voltage and mtpv curve"""
w1 = pmrel.w2_imax_umax(i1max, u1max)
nsamples = 20
if projection == '3d':
nsamples = 50
iqmax, idmax = pmrel.iqdmax(i1max)
iqmin, idmin = pmrel.iqdmin(i1max)
id = np.linspace(idmin, idmax, nsamples)
iq = np.linspace(iqmin, iqmax, nsamples)
u1_iqd = np.array(
[[np.linalg.norm(pmrel.uqd(w1, iqx, idx))/np.sqrt(2)
for idx in id] for iqx in iq])
u1 = np.mean(u1_iqd)
imtpv = np.array([pmrel.mtpv(wx, u1, i1max)
for wx in np.linspace(w1, 20*w1, nsamples)]).T
if projection == '3d':
torque_iqd = np.array(
[[pmrel.torque_iqd(x, y)
for y in id] for x in iq])
ax = idq_torque(id, iq, torque_iqd, ax)
ax.plot(imtpv[1], imtpv[0], imtpv[2],
color='red', linewidth=2)
else:
if ax == 0:
ax = plt.gca()
ax.set_aspect('equal')
x, y = np.meshgrid(id, iq)
CS = ax.contour(x, y, u1_iqd, 4, colors='b') # linestyles='dashed')
ax.clabel(CS, fmt='%d', inline=1)
ax.plot(imtpv[1], imtpv[0],
color='red', linewidth=2,
label='MTPV: {0:5.0f} Nm'.format(np.max(imtpv[2])))
# beta = np.arctan2(imtpv[1][0], imtpv[0][0])
# b = np.linspace(beta, 0)
# ax.plot(np.sqrt(2)*i1max*np.sin(b), np.sqrt(2)*i1max*np.cos(b), 'r-')
ax.grid()
ax.legend()
ax.set_xlabel('Id/A')
ax.set_ylabel('Iq/A')
if title:
ax.set_title(title) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def pmrelsim(bch, title=''):
"""creates a plot of a PM/Rel motor simulation"""
cols = 2
rows = 4
if len(bch.flux['1']) > 1:
rows += 1
htitle = 1.5 if title else 0
fig, ax = plt.subplots(nrows=rows, ncols=cols,
figsize=(10, 3*rows + htitle))
if title:
fig.suptitle(title, fontsize=16)
row = 1
plt.subplot(rows, cols, row)
if bch.torque:
torque(bch.torque[-1]['angle'], bch.torque[-1]['torque'])
plt.subplot(rows, cols, row+1)
tq = list(bch.torque_fft[-1]['torque'])
order = list(bch.torque_fft[-1]['order'])
if order and max(order) < 5:
order += [15]
tq += [0]
torque_fft(order, tq)
plt.subplot(rows, cols, row+2)
force('Force Fx',
bch.torque[-1]['angle'], bch.torque[-1]['force_x'])
plt.subplot(rows, cols, row+3)
force('Force Fy',
bch.torque[-1]['angle'], bch.torque[-1]['force_y'])
row += 3
elif bch.linearForce:
title, keys = __get_linearForce_title_keys(bch.linearForce[-1])
force(title[0], bch.linearForce[-1]['displ'],
bch.linearForce[-1][keys[0]], 'Displt. / mm')
plt.subplot(rows, cols, row+1)
force_fft(bch.linearForce_fft[-2]['order'],
bch.linearForce_fft[-2]['force'])
plt.subplot(rows, cols, row+2)
force(title[1], bch.linearForce[-1]['displ'],
bch.linearForce[-1][keys[1]], 'Displt. / mm')
plt.subplot(rows, cols, row+3)
force_fft(bch.linearForce_fft[-1]['order'],
bch.linearForce_fft[-1]['force'])
row += 3
plt.subplot(rows, cols, row+1)
flux = [bch.flux[k][-1] for k in bch.flux]
pos = [f['displ'] for f in flux]
winding_flux(pos,
[f['flux_k'] for f in flux])
plt.subplot(rows, cols, row+2)
winding_current(pos,
[f['current_k'] for f in flux])
plt.subplot(rows, cols, row+3)
voltage('Internal Voltage',
bch.flux['1'][-1]['displ'],
bch.flux['1'][-1]['voltage_dpsi'])
plt.subplot(rows, cols, row+4)
try:
voltage_fft('Internal Voltage Harmonics',
bch.flux_fft['1'][-1]['order'],
bch.flux_fft['1'][-1]['voltage'])
except Exception:
pass
if len(bch.flux['1']) > 1:
plt.subplot(rows, cols, row+5)
voltage('No Load Voltage',
bch.flux['1'][0]['displ'],
bch.flux['1'][0]['voltage_dpsi'])
plt.subplot(rows, cols, row+6)
try:
voltage_fft('No Load Voltage Harmonics',
bch.flux_fft['1'][0]['order'],
bch.flux_fft['1'][0]['voltage'])
except Exception:
pass
fig.tight_layout(h_pad=3.5)
if title:
fig.subplots_adjust(top=0.92) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def fasttorque(bch, title=''):
"""creates a plot of a Fast Torque simulation"""
cols = 2
rows = 4
if len(bch.flux['1']) > 1:
rows += 1
htitle = 1.5 if title else 0
fig, ax = plt.subplots(nrows=rows, ncols=cols,
figsize=(10, 3*rows + htitle))
if title:
fig.suptitle(title, fontsize=16)
row = 1
plt.subplot(rows, cols, row)
if bch.torque:
torque(bch.torque[-1]['angle'], bch.torque[-1]['torque'])
plt.subplot(rows, cols, row+1)
torque_fft(bch.torque_fft[-1]['order'], bch.torque_fft[-1]['torque'])
plt.subplot(rows, cols, row+2)
force('Force Fx',
bch.torque[-1]['angle'], bch.torque[-1]['force_x'])
plt.subplot(rows, cols, row+3)
force('Force Fy',
bch.torque[-1]['angle'], bch.torque[-1]['force_y'])
row += 3
elif bch.linearForce:
title, keys = __get_linearForce_title_keys(bch.linearForce[-1])
force(title[0], bch.linearForce[-1]['displ'],
bch.linearForce[-1][keys[0]], 'Displt. / mm')
plt.subplot(rows, cols, row+1)
force_fft(bch.linearForce_fft[-2]['order'],
bch.linearForce_fft[-2]['force'])
plt.subplot(rows, cols, row+2)
force(title[1], bch.linearForce[-1]['displ'],
bch.linearForce[-1][keys[1]], 'Displt. / mm')
plt.subplot(rows, cols, row+3)
force_fft(bch.linearForce_fft[-1]['order'],
bch.linearForce_fft[-1]['force'])
row += 3
plt.subplot(rows, cols, row+1)
flux = [bch.flux[k][-1] for k in bch.flux]
pos = [f['displ'] for f in flux]
winding_flux(pos, [f['flux_k'] for f in flux])
plt.subplot(rows, cols, row+2)
winding_current(pos, [f['current_k'] for f in flux])
plt.subplot(rows, cols, row+3)
voltage('Internal Voltage',
bch.flux['1'][-1]['displ'],
bch.flux['1'][-1]['voltage_dpsi'])
plt.subplot(rows, cols, row+4)
try:
voltage_fft('Internal Voltage Harmonics',
bch.flux_fft['1'][-1]['order'],
bch.flux_fft['1'][-1]['voltage'])
except Exception:
pass
if len(bch.flux['1']) > 1:
plt.subplot(rows, cols, row+5)
voltage('No Load Voltage',
bch.flux['1'][0]['displ'],
bch.flux['1'][0]['voltage_dpsi'])
plt.subplot(rows, cols, row+6)
try:
voltage_fft('No Load Voltage Harmonics',
bch.flux_fft['1'][0]['order'],
bch.flux_fft['1'][0]['voltage'])
except Exception:
pass
fig.tight_layout(h_pad=3.5)
if title:
fig.subplots_adjust(top=0.92) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def transientsc(bch, title=''):
"""creates a transient short circuit plot"""
cols = 1
rows = 2
htitle = 1.5 if title else 0
fig, ax = plt.subplots(nrows=rows, ncols=cols,
figsize=(10, 3*rows + htitle))
if title:
fig.suptitle(title, fontsize=16)
row = 1
plt.subplot(rows, cols, row)
ax = plt.gca()
ax.set_title('Currents / A')
ax.grid(True)
for i in ('ia', 'ib', 'ic'):
ax.plot(bch.scData['time'], bch.scData[i], label=i)
ax.set_xlabel('Time / s')
ax.legend()
row = 2
plt.subplot(rows, cols, row)
ax = plt.gca()
ax.set_title('Torque / Nm')
ax.grid(True)
ax.plot(bch.scData['time'], bch.scData['torque'])
ax.set_xlabel('Time / s')
fig.tight_layout(h_pad=2)
if title:
fig.subplots_adjust(top=0.92) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def i1beta_ld(i1, beta, ld, ax=0):
"""creates a surface plot of ld vs i1, beta"""
if ax == 0:
_create_3d_axis()
ax = plt.gca()
_plot_surface(ax, i1, beta, np.asarray(ld)*1e3,
(u'I1/A', u'Beta/°', u'Ld/mH'),
azim=60) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def i1beta_psim(i1, beta, psim, ax=0):
"""creates a surface plot of psim vs i1, beta"""
if ax == 0:
_create_3d_axis()
ax = plt.gca()
_plot_surface(ax, i1, beta, psim,
(u'I1/A', u'Beta/°', u'Psi m/Vs'),
azim=60) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |
def i1beta_psid(i1, beta, psid, ax=0):
"""creates a surface plot of psid vs i1, beta"""
if ax == 0:
_create_3d_axis()
ax = plt.gca()
azim = -60
if 0 < np.mean(beta) or -90 > np.mean(beta):
azim = 60
_plot_surface(ax, i1, beta, psid,
(u'I1/A', u'Beta/°', u'Psi d/Vs'),
azim=azim) | SEMAFORInformatik/femagtools | [17, 10, 17, 4, 1471511034] |