Dataset columns:
  code        string  (length 3 to 1.01M)
  repo_name   string  (length 5 to 116)
  path        string  (length 3 to 311)
  language    string  (30 classes)
  license     string  (15 classes)
  size        int64   (3 to 1.01M)
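The records below follow this schema. As a rough, illustrative sketch of how a dump with this shape is typically consumed, the following uses the Hugging Face datasets library in streaming mode; the dataset path "user/github-code-sample" is a placeholder, and the filter values simply mirror the records shown here rather than coming from any confirmed source.

# Minimal sketch: streaming a code dataset with the schema shown above.
# Assumptions: "user/github-code-sample" is a hypothetical dataset path, and
# the columns (code, repo_name, path, language, license, size) match the schema.
from datasets import load_dataset

ds = load_dataset("user/github-code-sample", split="train", streaming=True)

# Keep only GPL-3.0 licensed C files under 5 KB, similar to some records below.
small_gpl_c = ds.filter(
    lambda row: row["license"] == "gpl-3.0"
    and row["language"] == "C"
    and row["size"] < 5_000
)

for row in small_gpl_c.take(3):
    print(row["repo_name"], row["path"], row["size"])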
<?php
// This file is part of BOINC.
// http://boinc.berkeley.edu
// Copyright (C) 2014 University of California
//
// BOINC is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License
// as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any later version.
//
// BOINC is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
// See the GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with BOINC.  If not, see <http://www.gnu.org/licenses/>.

require_once("../inc/boinc_db.inc");
require_once("../inc/util.inc");
require_once("../inc/email.inc");
require_once("../inc/team.inc");

$xml = get_int('xml', true);
if ($xml) {
    require_once("../inc/xml.inc");
}

if (DISABLE_TEAMS) {
    if ($xml) {
        xml_error(-1, "Teams are disabled");
    } else {
        error_page("Teams are disabled");
    }
}

if ($xml) {
    $creditonly = get_int('creditonly', true);
    xml_header();
    $retval = db_init_xml();
    if ($retval) xml_error($retval);
    $teamid = get_int("teamid");
    $team = BoincTeam::lookup_id($teamid);
    if (!$team) {
        xml_error(ERR_DB_NOT_FOUND);
    }
    $account_key = get_str('account_key', true);
    $user = BoincUser::lookup_auth($account_key);
    $show_email = ($user && is_team_founder($user, $team));
    echo "<users>\n";
    $users = BoincUser::enum_fields("id, email_addr, send_email, name, total_credit, expavg_credit, expavg_time, has_profile, donated, country, cross_project_id, create_time, url", "teamid=$team->id");
    //$users = BoincUser::enum("teamid=$team->id");
    foreach($users as $user) {
        show_team_member($user, $show_email, $creditonly);
    }
    echo "</users>\n";
    exit();
}

$user = get_logged_in_user();
$teamid = get_int("teamid");
$plain = get_int("plain", true);
$team = BoincTeam::lookup_id($teamid);
if (!$team) error_page(tra("no such team"));
require_founder_login($user, $team);

if ($plain) {
    header("Content-type: text/plain");
} else {
    page_head(tra("%1 Email List", $team->name));
    start_table();
    table_header(array(tra("Member list of %1", $team->name), "colspan=\"6\""));
    table_header(tra("Name"), tra("Email address"), tra("Total credit"), tra("Recent average credit"), tra("Country"));
}

$users = BoincUser::enum_fields("id, email_addr, send_email, name, total_credit, expavg_credit, has_profile, donated, country, cross_project_id, create_time, url", "teamid=$team->id");
foreach($users as $user) {
    if ($plain) {
        $e = $user->send_email?"<$user->email_addr>":"";
        echo "$user->name $e\n";
    } else {
        $e = $user->send_email?"$user->email_addr":"";
        table_row(user_links($user, BADGE_HEIGHT_MEDIUM), $e, format_credit($user->total_credit), format_credit($user->expavg_credit), $user->country);
    }
}

if (!$plain) {
    end_table();
    echo "<p><a href=\"team_email_list.php?teamid=".$teamid."&amp;plain=1\">".tra("Show as plain text")."</a></p>";
    page_tail();
}

$cvs_version_tracker[]="\$Id$";  //Generated automatically - do not edit
?>
Simek/boinc
html/user/team_email_list.php
PHP
gpl-3.0
3,318
#Region "Microsoft.VisualBasic::d865566954cecb016e8a9789c08f7908, Data_science\Visualization\Canvas3D\Device\Mouse.vb" ' Author: ' ' asuka (amethyst.asuka@gcmodeller.org) ' xie (genetics@smrucc.org) ' xieguigang (xie.guigang@live.com) ' ' Copyright (c) 2018 GPL3 Licensed ' ' ' GNU GENERAL PUBLIC LICENSE (GPL3) ' ' ' This program is free software: you can redistribute it and/or modify ' it under the terms of the GNU General Public License as published by ' the Free Software Foundation, either version 3 of the License, or ' (at your option) any later version. ' ' This program is distributed in the hope that it will be useful, ' but WITHOUT ANY WARRANTY; without even the implied warranty of ' MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ' GNU General Public License for more details. ' ' You should have received a copy of the GNU General Public License ' along with this program. If not, see <http://www.gnu.org/licenses/>. ' /********************************************************************************/ ' Summaries: ' Class Mouse ' ' Constructor: (+1 Overloads) Sub New ' Sub: device_MouseDown, device_MouseMove, device_MouseUp ' ' ' /********************************************************************************/ #End Region Imports Microsoft.VisualBasic.Imaging.Drawing3D Namespace Drawing3D.Device Public Class Mouse : Inherits IDevice(Of UserControl) Dim press As Boolean Dim oldXY As Point Dim camera As Camera Public Sub New(dev As UserControl, camera As Camera) MyBase.New(dev) Me.camera = camera End Sub Private Sub device_MouseDown(sender As Object, e As MouseEventArgs) Handles device.MouseDown press = True oldXY = e.Location End Sub Private Sub device_MouseMove(sender As Object, e As MouseEventArgs) Handles device.MouseMove Dim xy = e.Location If Not press Then Return End If If e.Button = MouseButtons.Left Then ' 左键旋转 If xy.X > oldXY.X Then ' right camera.angleY += 1 End If If xy.X < oldXY.X Then ' left camera.angleY -= 1 End If If xy.Y > oldXY.Y Then ' down 'device._camera.angleZ -= 1 camera.angleX -= 1 End If If xy.Y < oldXY.Y Then ' up 'device._camera.angleZ += 1 camera.angleX += 1 End If ElseIf e.Button = MouseButtons.Right Then ' 右键进行位移 Dim dx = xy.X - oldXY.X Dim dy = xy.Y - oldXY.Y camera.offset = New PointF With { .X = camera.offset.X + dx, .Y = camera.offset.Y + dy } Else End If oldXY = xy End Sub Private Sub device_MouseUp(sender As Object, e As MouseEventArgs) Handles device.MouseUp press = False End Sub End Class End Namespace
SMRUCC/GCModeller
src/runtime/sciBASIC#/Data_science/Visualization/Canvas3D/Device/Mouse.vb
Visual Basic
gpl-3.0
3,384
/***************************************************************************** * FullFAT - High Performance, Thread-Safe Embedded FAT File-System * * Copyright (C) 2009 James Walmsley (james@worm.me.uk) * * * * This program is free software: you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation, either version 3 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License * * along with this program. If not, see <http://www.gnu.org/licenses/>. * * * * IMPORTANT NOTICE: * * ================= * * Alternative Licensing is available directly from the Copyright holder, * * (James Walmsley). For more information consult LICENSING.TXT to obtain * * a Commercial license. * * * * See RESTRICTIONS.TXT for extra restrictions on the use of FullFAT. * * * * Removing the above notice is illegal and will invalidate this license. * ***************************************************************************** * See http://worm.me.uk/fullfat for more information. * * Or http://fullfat.googlecode.com/ for latest releases and the wiki. * *****************************************************************************/ #ifndef _CMD_H_ #define _CMD_H_ #include "../../../../src/fullfat.h" #include <FFTerm.h> typedef struct { // Provides an environment for the FullFAT commands. FF_IOMAN *pIoman; FF_T_INT8 WorkingDir[FF_MAX_PATH]; // A working directory Environment variable. 
} FF_ENVIRONMENT; void ProcessPath(char *dest, char *src, FF_ENVIRONMENT *pEnv); extern const FFT_ERR_TABLE promptInfo[]; extern const FFT_ERR_TABLE pwdInfo[]; extern const FFT_ERR_TABLE lsInfo[]; extern const FFT_ERR_TABLE cdInfo[]; extern const FFT_ERR_TABLE md5Info[]; extern const FFT_ERR_TABLE cpInfo[]; extern const FFT_ERR_TABLE icpInfo[]; extern const FFT_ERR_TABLE xcpInfo[]; extern const FFT_ERR_TABLE mkdirInfo[]; extern const FFT_ERR_TABLE infoInfo[]; extern const FFT_ERR_TABLE mountInfo[]; extern const FFT_ERR_TABLE viewInfo[]; extern const FFT_ERR_TABLE rmInfo[]; extern const FFT_ERR_TABLE mkimgInfo[]; extern const FFT_ERR_TABLE mkfileInfo[]; extern const FFT_ERR_TABLE mkwinfileInfo[]; extern const FFT_ERR_TABLE exitInfo[]; extern const FFT_ERR_TABLE md5winInfo[]; extern const FFT_ERR_TABLE hexviewInfo[]; extern const FFT_ERR_TABLE runInfo[]; extern const FFT_ERR_TABLE timeInfo[]; extern const FFT_ERR_TABLE dateInfo[]; extern const FFT_ERR_TABLE moveInfo[]; extern const FFT_ERR_TABLE drivelistInfo[]; int cmd_prompt (int argc, char **argv, FF_ENVIRONMENT *pEnv); int pwd_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int ls_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int cd_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int md5_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int cp_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int icp_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int xcp_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int mkdir_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int info_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int mount_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int view_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int rm_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int move_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int mkimg_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int mkfile_cmd (int argc, char **argv, FF_ENVIRONMENT *pEnv); int mkwinfile_cmd (int argc, char **argv); int exit_cmd (int argc, char **argv); int run_cmd (int argc, char **argv); int time_cmd (int argc, char **argv); int date_cmd (int argc, char **argv); int md5win_cmd (int argc, char **argv); int hexview_cmd (int argc, char **argv); int drivelist_cmd (int argc, char **argv); #endif
piranna/fullfat
Demo/FullFAT32.dll/FullFAT32.dll/FullFAT32.dll/cmd.h
C
gpl-3.0
4,981
/*
 * Copyright (c) 2011-2012 Yaroslav Stavnichiy <yarosla@gmail.com>
 *
 * This file is part of NXWEB.
 *
 * NXWEB is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation, either version 3
 * of the License, or (at your option) any later version.
 *
 * NXWEB is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with NXWEB. If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef NX_ALLOC_H
#define NX_ALLOC_H

#ifdef __cplusplus
extern "C" {
#endif

#include <malloc.h>

#define MEM_GUARD 64

#define nx_alloc(size) memalign(MEM_GUARD, (size)+MEM_GUARD)
#define nx_calloc(size) ({void* _pTr=memalign(MEM_GUARD, (size)+MEM_GUARD); memset(_pTr, 0, (size)); _pTr;})
#define nx_free(ptr) free(ptr)

#ifdef __cplusplus
}
#endif

#endif /* NX_ALLOC_H */
kevinhwm/adfs-lite
src/include/nxweb/nx_alloc.h
C
gpl-3.0
1,137
using System; using kOS.Safe.Encapsulation.Suffixes; using kOS.Safe.Exceptions; using System.Linq; using System.Reflection; using System.Collections.Generic; using kOS.Safe.Encapsulation; namespace kOS.Suffixed.PartModuleField { [kOS.Safe.Utilities.KOSNomenclature("ScienceExperimentModule")] public abstract class ScienceExperimentFields : PartModuleFields { protected global::Part part; protected IScienceDataContainer container; public ScienceExperimentFields(PartModule module, SharedObjects shared) : base(module, shared) { this.container = module as IScienceDataContainer; part = module.part; if (container == null) { throw new KOSException("This module is not a science data container"); } InitializeSuffixes(); } private void InitializeSuffixes() { AddSuffix("DEPLOY", new NoArgsVoidSuffix(DeployExperiment, "Deploy and run this experiment")); AddSuffix("RESET", new NoArgsVoidSuffix(ResetExperiment, "Reset this experiment")); AddSuffix("TRANSMIT", new NoArgsVoidSuffix(TransmitData, "Transmit experiment data back to Kerbin")); AddSuffix("DUMP", new NoArgsVoidSuffix(DumpData, "Dump experiment data")); AddSuffix("INOPERABLE", new Suffix<BooleanValue>(() => Inoperable(), "Is this experiment inoperable")); AddSuffix("DEPLOYED", new Suffix<BooleanValue>(() => Deployed(), "Is this experiment deployed")); AddSuffix("RERUNNABLE", new Suffix<BooleanValue>(() => Rerunnable(), "Is this experiment rerunnable")); AddSuffix("HASDATA", new Suffix<BooleanValue>(() => HasData(), "Does this experiment have any data stored")); AddSuffix("DATA", new Suffix<ListValue>(Data, "Does this experiment have any data stored")); } public abstract bool Deployed(); public abstract bool Inoperable(); public abstract void DeployExperiment(); public abstract void ResetExperiment(); public virtual bool Rerunnable() { return container.IsRerunnable(); } public virtual bool HasData() { return container.GetData().Any(); } public virtual ListValue Data() { return new ListValue(container.GetData().Select(s => new ScienceDataValue(s, part)).Cast<Structure>()); } public virtual void DumpData() { ThrowIfNotCPUVessel(); Array.ForEach(container.GetData(), (d) => container.DumpData(d)); } public abstract void TransmitData(); public new string ToString() { return "SCIENCE EXPERIMENT"; } } }
theodoregoetz/KOS
src/kOS/Suffixed/PartModuleField/ScienceExperimentFields.cs
C#
gpl-3.0
2,802
#!/usr/bin/env bash

set -eux

platform="$(uname)"

function setup() {
    if [[ "${platform}" == "FreeBSD" ]] || [[ "${platform}" == "Darwin" ]]; then
        ifconfig lo0

        existing=$(ifconfig lo0 | grep '^[[:blank:]]inet 127\.0\.0\. ' || true)

        echo "${existing}"

        for i in 3 4 254; do
            ip="127.0.0.${i}"
            if [[ "${existing}" != *"${ip}"* ]]; then
                ifconfig lo0 alias "${ip}" up
            fi
        done

        ifconfig lo0
    fi
}

function teardown() {
    if [[ "${platform}" == "FreeBSD" ]] || [[ "${platform}" == "Darwin" ]]; then
        for i in 3 4 254; do
            ip="127.0.0.${i}"
            if [[ "${existing}" != *"${ip}"* ]]; then
                ifconfig lo0 -alias "${ip}"
            fi
        done

        ifconfig lo0
    fi
}

setup

trap teardown EXIT

ANSIBLE_SSH_ARGS='-C -o ControlMaster=auto -o ControlPersist=60s -o UserKnownHostsFile=/dev/null' \
    ANSIBLE_HOST_KEY_CHECKING=false ansible-playbook test_delegate_to.yml -i inventory -v "$@"

# this test is not doing what it says it does, also relies on var that should not be available
#ansible-playbook test_loop_control.yml -v "$@"

ansible-playbook test_delegate_to_loop_randomness.yml -v "$@"

ansible-playbook delegate_and_nolog.yml -i inventory -v "$@"

ansible-playbook delegate_facts_block.yml -i inventory -v "$@"

ansible-playbook test_delegate_to_loop_caching.yml -i inventory -v "$@"

# ensure we are using correct settings when delegating
ANSIBLE_TIMEOUT=3 ansible-playbook delegate_vars_hanldling.yml -i inventory -v "$@"

ansible-playbook has_hostvars.yml -i inventory -v "$@"

# test ansible_x_interpreter
# python
source virtualenv.sh
(
cd "${OUTPUT_DIR}"/venv/bin
ln -s python firstpython
ln -s python secondpython
)
ansible-playbook verify_interpreter.yml -i inventory_interpreters -v "$@"
ansible-playbook discovery_applied.yml -i inventory -v "$@"
jtyr/ansible
test/integration/targets/delegate_to/runme.sh
Shell
gpl-3.0
1,925
/* Profile heap and stack memory usage of running program. Copyright (C) 1998-2016 Free Software Foundation, Inc. This file is part of the GNU C Library. Contributed by Ulrich Drepper <drepper@cygnus.com>, 1998. The GNU C Library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. The GNU C Library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the GNU C Library; if not, see <http://www.gnu.org/licenses/>. */ #include <assert.h> #include <atomic.h> #include <dlfcn.h> #include <errno.h> #include <fcntl.h> #include <inttypes.h> #include <signal.h> #include <stdarg.h> #include <stdbool.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include <stdint.h> #include <sys/mman.h> #include <sys/time.h> #include <memusage.h> /* Pointer to the real functions. These are determined used `dlsym' when really needed. */ static void *(*mallocp)(size_t); static void *(*reallocp) (void *, size_t); static void *(*callocp) (size_t, size_t); static void (*freep) (void *); static void *(*mmapp) (void *, size_t, int, int, int, off_t); static void *(*mmap64p) (void *, size_t, int, int, int, off64_t); static int (*munmapp) (void *, size_t); static void *(*mremapp) (void *, size_t, size_t, int, void *); enum { idx_malloc = 0, idx_realloc, idx_calloc, idx_free, idx_mmap_r, idx_mmap_w, idx_mmap_a, idx_mremap, idx_munmap, idx_last }; struct header { size_t length; size_t magic; }; #define MAGIC 0xfeedbeaf static memusage_cntr_t calls[idx_last]; static memusage_cntr_t failed[idx_last]; static memusage_size_t total[idx_last]; static memusage_size_t grand_total; static memusage_cntr_t histogram[65536 / 16]; static memusage_cntr_t large; static memusage_cntr_t calls_total; static memusage_cntr_t inplace; static memusage_cntr_t decreasing; static memusage_cntr_t realloc_free; static memusage_cntr_t inplace_mremap; static memusage_cntr_t decreasing_mremap; static memusage_size_t current_heap; static memusage_size_t peak_use[3]; static __thread uintptr_t start_sp; /* A few macros to make the source more readable. */ #define peak_heap peak_use[0] #define peak_stack peak_use[1] #define peak_total peak_use[2] #define DEFAULT_BUFFER_SIZE 32768 static size_t buffer_size; static int fd = -1; static bool not_me; static int initialized; static bool trace_mmap; extern const char *__progname; struct entry { uint64_t heap; uint64_t stack; uint32_t time_low; uint32_t time_high; }; static struct entry buffer[2 * DEFAULT_BUFFER_SIZE]; static uatomic32_t buffer_cnt; static struct entry first; /* Update the global data after a successful function call. */ static void update_data (struct header *result, size_t len, size_t old_len) { if (result != NULL) { /* Record the information we need and mark the block using a magic number. */ result->length = len; result->magic = MAGIC; } /* Compute current heap usage and compare it with the maximum value. */ memusage_size_t heap = catomic_exchange_and_add (&current_heap, len - old_len) + len - old_len; catomic_max (&peak_heap, heap); /* Compute current stack usage and compare it with the maximum value. 
The base stack pointer might not be set if this is not the main thread and it is the first call to any of these functions. */ if (__glibc_unlikely (!start_sp)) start_sp = GETSP (); uintptr_t sp = GETSP (); #ifdef STACK_GROWS_UPWARD /* This can happen in threads where we didn't catch the thread's stack early enough. */ if (__glibc_unlikely (sp < start_sp)) start_sp = sp; size_t current_stack = sp - start_sp; #else /* This can happen in threads where we didn't catch the thread's stack early enough. */ if (__glibc_unlikely (sp > start_sp)) start_sp = sp; size_t current_stack = start_sp - sp; #endif catomic_max (&peak_stack, current_stack); /* Add up heap and stack usage and compare it with the maximum value. */ catomic_max (&peak_total, heap + current_stack); /* Store the value only if we are writing to a file. */ if (fd != -1) { uatomic32_t idx = catomic_exchange_and_add (&buffer_cnt, 1); if (idx + 1 >= 2 * buffer_size) { /* We try to reset the counter to the correct range. If this fails because of another thread increasing the counter it does not matter since that thread will take care of the correction. */ uatomic32_t reset = (idx + 1) % (2 * buffer_size); catomic_compare_and_exchange_val_acq (&buffer_cnt, reset, idx + 1); if (idx >= 2 * buffer_size) idx = reset - 1; } assert (idx < 2 * DEFAULT_BUFFER_SIZE); buffer[idx].heap = current_heap; buffer[idx].stack = current_stack; GETTIME (buffer[idx].time_low, buffer[idx].time_high); /* Write out buffer if it is full. */ if (idx + 1 == buffer_size) write (fd, buffer, buffer_size * sizeof (struct entry)); else if (idx + 1 == 2 * buffer_size) write (fd, &buffer[buffer_size], buffer_size * sizeof (struct entry)); } } /* Interrupt handler. */ static void int_handler (int signo) { /* Nothing gets allocated. Just record the stack pointer position. */ update_data (NULL, 0, 0); } /* Find out whether this is the program we are supposed to profile. For this the name in the variable `__progname' must match the one given in the environment variable MEMUSAGE_PROG_NAME. If the variable is not present every program assumes it should be profiling. If this is the program open a file descriptor to the output file. We will write to it whenever the buffer overflows. The name of the output file is determined by the environment variable MEMUSAGE_OUTPUT. If the environment variable MEMUSAGE_BUFFER_SIZE is set its numerical value determines the size of the internal buffer. The number gives the number of elements in the buffer. By setting the number to one one effectively selects unbuffered operation. If MEMUSAGE_NO_TIMER is not present an alarm handler is installed which at the highest possible frequency records the stack pointer. */ static void me (void) { const char *env = getenv ("MEMUSAGE_PROG_NAME"); size_t prog_len = strlen (__progname); initialized = -1; mallocp = (void *(*)(size_t))dlsym (RTLD_NEXT, "malloc"); reallocp = (void *(*)(void *, size_t))dlsym (RTLD_NEXT, "realloc"); callocp = (void *(*)(size_t, size_t))dlsym (RTLD_NEXT, "calloc"); freep = (void (*)(void *))dlsym (RTLD_NEXT, "free"); mmapp = (void *(*)(void *, size_t, int, int, int, off_t))dlsym (RTLD_NEXT, "mmap"); mmap64p = (void *(*)(void *, size_t, int, int, int, off64_t))dlsym (RTLD_NEXT, "mmap64"); mremapp = (void *(*)(void *, size_t, size_t, int, void *))dlsym (RTLD_NEXT, "mremap"); munmapp = (int (*)(void *, size_t))dlsym (RTLD_NEXT, "munmap"); initialized = 1; if (env != NULL) { /* Check for program name. 
*/ size_t len = strlen (env); if (len > prog_len || strcmp (env, &__progname[prog_len - len]) != 0 || (prog_len != len && __progname[prog_len - len - 1] != '/')) not_me = true; } /* Only open the file if it's really us. */ if (!not_me && fd == -1) { const char *outname; if (!start_sp) start_sp = GETSP (); outname = getenv ("MEMUSAGE_OUTPUT"); if (outname != NULL && outname[0] != '\0' && (access (outname, R_OK | W_OK) == 0 || errno == ENOENT)) { fd = creat64 (outname, 0666); if (fd == -1) /* Don't do anything in future calls if we cannot write to the output file. */ not_me = true; else { /* Write the first entry. */ first.heap = 0; first.stack = 0; GETTIME (first.time_low, first.time_high); /* Write it two times since we need the starting and end time. */ write (fd, &first, sizeof (first)); write (fd, &first, sizeof (first)); /* Determine the buffer size. We use the default if the environment variable is not present. */ buffer_size = DEFAULT_BUFFER_SIZE; const char *str_buffer_size = getenv ("MEMUSAGE_BUFFER_SIZE"); if (str_buffer_size != NULL) { buffer_size = atoi (str_buffer_size); if (buffer_size == 0 || buffer_size > DEFAULT_BUFFER_SIZE) buffer_size = DEFAULT_BUFFER_SIZE; } /* Possibly enable timer-based stack pointer retrieval. */ if (getenv ("MEMUSAGE_NO_TIMER") == NULL) { struct sigaction act; act.sa_handler = (sighandler_t) &int_handler; act.sa_flags = SA_RESTART; sigfillset (&act.sa_mask); if (sigaction (SIGPROF, &act, NULL) >= 0) { struct itimerval timer; timer.it_value.tv_sec = 0; timer.it_value.tv_usec = 1; timer.it_interval = timer.it_value; setitimer (ITIMER_PROF, &timer, NULL); } } } } if (!not_me && getenv ("MEMUSAGE_TRACE_MMAP") != NULL) trace_mmap = true; } } /* Record the initial stack position. */ static void __attribute__ ((constructor)) init (void) { start_sp = GETSP (); if (!initialized) me (); } /* `malloc' replacement. We keep track of the memory usage if this is the correct program. */ void * malloc (size_t len) { struct header *result = NULL; /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return NULL; me (); } /* If this is not the correct program just use the normal function. */ if (not_me) return (*mallocp)(len); /* Keep track of number of calls. */ catomic_increment (&calls[idx_malloc]); /* Keep track of total memory consumption for `malloc'. */ catomic_add (&total[idx_malloc], len); /* Keep track of total memory requirement. */ catomic_add (&grand_total, len); /* Remember the size of the request. */ if (len < 65536) catomic_increment (&histogram[len / 16]); else catomic_increment (&large); /* Total number of calls of any of the functions. */ catomic_increment (&calls_total); /* Do the real work. */ result = (struct header *) (*mallocp)(len + sizeof (struct header)); if (result == NULL) { catomic_increment (&failed[idx_malloc]); return NULL; } /* Update the allocation data and write out the records if necessary. */ update_data (result, len, 0); /* Return the pointer to the user buffer. */ return (void *) (result + 1); } /* `realloc' replacement. We keep track of the memory usage if this is the correct program. */ void * realloc (void *old, size_t len) { struct header *result = NULL; struct header *real; size_t old_len; /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return NULL; me (); } /* If this is not the correct program just use the normal function. 
*/ if (not_me) return (*reallocp)(old, len); if (old == NULL) { /* This is really a `malloc' call. */ real = NULL; old_len = 0; } else { real = ((struct header *) old) - 1; if (real->magic != MAGIC) /* This is no memory allocated here. */ return (*reallocp)(old, len); old_len = real->length; } /* Keep track of number of calls. */ catomic_increment (&calls[idx_realloc]); if (len > old_len) { /* Keep track of total memory consumption for `realloc'. */ catomic_add (&total[idx_realloc], len - old_len); /* Keep track of total memory requirement. */ catomic_add (&grand_total, len - old_len); } if (len == 0 && old != NULL) { /* Special case. */ catomic_increment (&realloc_free); /* Keep track of total memory freed using `free'. */ catomic_add (&total[idx_free], real->length); /* Update the allocation data and write out the records if necessary. */ update_data (NULL, 0, old_len); /* Do the real work. */ (*freep) (real); return NULL; } /* Remember the size of the request. */ if (len < 65536) catomic_increment (&histogram[len / 16]); else catomic_increment (&large); /* Total number of calls of any of the functions. */ catomic_increment (&calls_total); /* Do the real work. */ result = (struct header *) (*reallocp)(real, len + sizeof (struct header)); if (result == NULL) { catomic_increment (&failed[idx_realloc]); return NULL; } /* Record whether the reduction/increase happened in place. */ if (real == result) catomic_increment (&inplace); /* Was the buffer increased? */ if (old_len > len) catomic_increment (&decreasing); /* Update the allocation data and write out the records if necessary. */ update_data (result, len, old_len); /* Return the pointer to the user buffer. */ return (void *) (result + 1); } /* `calloc' replacement. We keep track of the memory usage if this is the correct program. */ void * calloc (size_t n, size_t len) { struct header *result; size_t size = n * len; /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return NULL; me (); } /* If this is not the correct program just use the normal function. */ if (not_me) return (*callocp)(n, len); /* Keep track of number of calls. */ catomic_increment (&calls[idx_calloc]); /* Keep track of total memory consumption for `calloc'. */ catomic_add (&total[idx_calloc], size); /* Keep track of total memory requirement. */ catomic_add (&grand_total, size); /* Remember the size of the request. */ if (size < 65536) catomic_increment (&histogram[size / 16]); else catomic_increment (&large); /* Total number of calls of any of the functions. */ ++calls_total; /* Do the real work. */ result = (struct header *) (*mallocp)(size + sizeof (struct header)); if (result == NULL) { catomic_increment (&failed[idx_calloc]); return NULL; } /* Update the allocation data and write out the records if necessary. */ update_data (result, size, 0); /* Do what `calloc' would have done and return the buffer to the caller. */ return memset (result + 1, '\0', size); } /* `free' replacement. We keep track of the memory usage if this is the correct program. */ void free (void *ptr) { struct header *real; /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return; me (); } /* If this is not the correct program just use the normal function. */ if (not_me) { (*freep) (ptr); return; } /* `free (NULL)' has no effect. */ if (ptr == NULL) { catomic_increment (&calls[idx_free]); return; } /* Determine the pointer to the header. 
*/ real = ((struct header *) ptr) - 1; if (real->magic != MAGIC) { /* This block wasn't allocated here. */ (*freep) (ptr); return; } /* Keep track of number of calls. */ catomic_increment (&calls[idx_free]); /* Keep track of total memory freed using `free'. */ catomic_add (&total[idx_free], real->length); /* Update the allocation data and write out the records if necessary. */ update_data (NULL, 0, real->length); /* Do the real work. */ (*freep) (real); } /* `mmap' replacement. We do not have to keep track of the size since `munmap' will get it as a parameter. */ void * mmap (void *start, size_t len, int prot, int flags, int fd, off_t offset) { void *result = NULL; /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return NULL; me (); } /* Always get a block. We don't need extra memory. */ result = (*mmapp)(start, len, prot, flags, fd, offset); if (!not_me && trace_mmap) { int idx = (flags & MAP_ANON ? idx_mmap_a : prot & PROT_WRITE ? idx_mmap_w : idx_mmap_r); /* Keep track of number of calls. */ catomic_increment (&calls[idx]); /* Keep track of total memory consumption for `malloc'. */ catomic_add (&total[idx], len); /* Keep track of total memory requirement. */ catomic_add (&grand_total, len); /* Remember the size of the request. */ if (len < 65536) catomic_increment (&histogram[len / 16]); else catomic_increment (&large); /* Total number of calls of any of the functions. */ catomic_increment (&calls_total); /* Check for failures. */ if (result == NULL) catomic_increment (&failed[idx]); else if (idx == idx_mmap_w) /* Update the allocation data and write out the records if necessary. Note the first parameter is NULL which means the size is not tracked. */ update_data (NULL, len, 0); } /* Return the pointer to the user buffer. */ return result; } /* `mmap64' replacement. We do not have to keep track of the size since `munmap' will get it as a parameter. */ void * mmap64 (void *start, size_t len, int prot, int flags, int fd, off64_t offset) { void *result = NULL; /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return NULL; me (); } /* Always get a block. We don't need extra memory. */ result = (*mmap64p)(start, len, prot, flags, fd, offset); if (!not_me && trace_mmap) { int idx = (flags & MAP_ANON ? idx_mmap_a : prot & PROT_WRITE ? idx_mmap_w : idx_mmap_r); /* Keep track of number of calls. */ catomic_increment (&calls[idx]); /* Keep track of total memory consumption for `malloc'. */ catomic_add (&total[idx], len); /* Keep track of total memory requirement. */ catomic_add (&grand_total, len); /* Remember the size of the request. */ if (len < 65536) catomic_increment (&histogram[len / 16]); else catomic_increment (&large); /* Total number of calls of any of the functions. */ catomic_increment (&calls_total); /* Check for failures. */ if (result == NULL) catomic_increment (&failed[idx]); else if (idx == idx_mmap_w) /* Update the allocation data and write out the records if necessary. Note the first parameter is NULL which means the size is not tracked. */ update_data (NULL, len, 0); } /* Return the pointer to the user buffer. */ return result; } /* `mremap' replacement. We do not have to keep track of the size since `munmap' will get it as a parameter. */ void * mremap (void *start, size_t old_len, size_t len, int flags, ...) { void *result = NULL; va_list ap; va_start (ap, flags); void *newaddr = (flags & MREMAP_FIXED) ? 
va_arg (ap, void *) : NULL; va_end (ap); /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return NULL; me (); } /* Always get a block. We don't need extra memory. */ result = (*mremapp)(start, old_len, len, flags, newaddr); if (!not_me && trace_mmap) { /* Keep track of number of calls. */ catomic_increment (&calls[idx_mremap]); if (len > old_len) { /* Keep track of total memory consumption for `malloc'. */ catomic_add (&total[idx_mremap], len - old_len); /* Keep track of total memory requirement. */ catomic_add (&grand_total, len - old_len); } /* Remember the size of the request. */ if (len < 65536) catomic_increment (&histogram[len / 16]); else catomic_increment (&large); /* Total number of calls of any of the functions. */ catomic_increment (&calls_total); /* Check for failures. */ if (result == NULL) catomic_increment (&failed[idx_mremap]); else { /* Record whether the reduction/increase happened in place. */ if (start == result) catomic_increment (&inplace_mremap); /* Was the buffer increased? */ if (old_len > len) catomic_increment (&decreasing_mremap); /* Update the allocation data and write out the records if necessary. Note the first parameter is NULL which means the size is not tracked. */ update_data (NULL, len, old_len); } } /* Return the pointer to the user buffer. */ return result; } /* `munmap' replacement. */ int munmap (void *start, size_t len) { int result; /* Determine real implementation if not already happened. */ if (__glibc_unlikely (initialized <= 0)) { if (initialized == -1) return -1; me (); } /* Do the real work. */ result = (*munmapp)(start, len); if (!not_me && trace_mmap) { /* Keep track of number of calls. */ catomic_increment (&calls[idx_munmap]); if (__glibc_likely (result == 0)) { /* Keep track of total memory freed using `free'. */ catomic_add (&total[idx_munmap], len); /* Update the allocation data and write out the records if necessary. */ update_data (NULL, 0, len); } else catomic_increment (&failed[idx_munmap]); } return result; } /* Write some statistics to standard error. */ static void __attribute__ ((destructor)) dest (void) { int percent, cnt; unsigned long int maxcalls; /* If we haven't done anything here just return. */ if (not_me) return; /* If we should call any of the memory functions don't do any profiling. */ not_me = true; /* Finish the output file. */ if (fd != -1) { /* Write the partially filled buffer. */ if (buffer_cnt > buffer_size) write (fd, buffer + buffer_size, (buffer_cnt - buffer_size) * sizeof (struct entry)); else write (fd, buffer, buffer_cnt * sizeof (struct entry)); /* Go back to the beginning of the file. We allocated two records here when we opened the file. */ lseek (fd, 0, SEEK_SET); /* Write out a record containing the total size. */ first.stack = peak_total; write (fd, &first, sizeof (struct entry)); /* Write out another record containing the maximum for heap and stack. */ first.heap = peak_heap; first.stack = peak_stack; GETTIME (first.time_low, first.time_high); write (fd, &first, sizeof (struct entry)); /* Close the file. */ close (fd); fd = -1; } /* Write a colorful statistic. 
*/ fprintf (stderr, "\n\ \e[01;32mMemory usage summary:\e[0;0m heap total: %llu, heap peak: %lu, stack peak: %lu\n\ \e[04;34m total calls total memory failed calls\e[0m\n\ \e[00;34m malloc|\e[0m %10lu %12llu %s%12lu\e[00;00m\n\ \e[00;34mrealloc|\e[0m %10lu %12llu %s%12lu\e[00;00m (nomove:%ld, dec:%ld, free:%ld)\n\ \e[00;34m calloc|\e[0m %10lu %12llu %s%12lu\e[00;00m\n\ \e[00;34m free|\e[0m %10lu %12llu\n", (unsigned long long int) grand_total, (unsigned long int) peak_heap, (unsigned long int) peak_stack, (unsigned long int) calls[idx_malloc], (unsigned long long int) total[idx_malloc], failed[idx_malloc] ? "\e[01;41m" : "", (unsigned long int) failed[idx_malloc], (unsigned long int) calls[idx_realloc], (unsigned long long int) total[idx_realloc], failed[idx_realloc] ? "\e[01;41m" : "", (unsigned long int) failed[idx_realloc], (unsigned long int) inplace, (unsigned long int) decreasing, (unsigned long int) realloc_free, (unsigned long int) calls[idx_calloc], (unsigned long long int) total[idx_calloc], failed[idx_calloc] ? "\e[01;41m" : "", (unsigned long int) failed[idx_calloc], (unsigned long int) calls[idx_free], (unsigned long long int) total[idx_free]); if (trace_mmap) fprintf (stderr, "\ \e[00;34mmmap(r)|\e[0m %10lu %12llu %s%12lu\e[00;00m\n\ \e[00;34mmmap(w)|\e[0m %10lu %12llu %s%12lu\e[00;00m\n\ \e[00;34mmmap(a)|\e[0m %10lu %12llu %s%12lu\e[00;00m\n\ \e[00;34m mremap|\e[0m %10lu %12llu %s%12lu\e[00;00m (nomove: %ld, dec:%ld)\n\ \e[00;34m munmap|\e[0m %10lu %12llu %s%12lu\e[00;00m\n", (unsigned long int) calls[idx_mmap_r], (unsigned long long int) total[idx_mmap_r], failed[idx_mmap_r] ? "\e[01;41m" : "", (unsigned long int) failed[idx_mmap_r], (unsigned long int) calls[idx_mmap_w], (unsigned long long int) total[idx_mmap_w], failed[idx_mmap_w] ? "\e[01;41m" : "", (unsigned long int) failed[idx_mmap_w], (unsigned long int) calls[idx_mmap_a], (unsigned long long int) total[idx_mmap_a], failed[idx_mmap_a] ? "\e[01;41m" : "", (unsigned long int) failed[idx_mmap_a], (unsigned long int) calls[idx_mremap], (unsigned long long int) total[idx_mremap], failed[idx_mremap] ? "\e[01;41m" : "", (unsigned long int) failed[idx_mremap], (unsigned long int) inplace_mremap, (unsigned long int) decreasing_mremap, (unsigned long int) calls[idx_munmap], (unsigned long long int) total[idx_munmap], failed[idx_munmap] ? "\e[01;41m" : "", (unsigned long int) failed[idx_munmap]); /* Write out a histoogram of the sizes of the allocations. */ fprintf (stderr, "\e[01;32mHistogram for block sizes:\e[0;0m\n"); /* Determine the maximum of all calls for each size range. */ maxcalls = large; for (cnt = 0; cnt < 65536; cnt += 16) if (histogram[cnt / 16] > maxcalls) maxcalls = histogram[cnt / 16]; for (cnt = 0; cnt < 65536; cnt += 16) /* Only write out the nonzero entries. */ if (histogram[cnt / 16] != 0) { percent = (histogram[cnt / 16] * 100) / calls_total; fprintf (stderr, "%5d-%-5d%12lu ", cnt, cnt + 15, (unsigned long int) histogram[cnt / 16]); if (percent == 0) fputs (" <1% \e[41;37m", stderr); else fprintf (stderr, "%3d%% \e[41;37m", percent); /* Draw a bar with a length corresponding to the current percentage. 
*/ percent = (histogram[cnt / 16] * 50) / maxcalls; while (percent-- > 0) fputc ('=', stderr); fputs ("\e[0;0m\n", stderr); } if (large != 0) { percent = (large * 100) / calls_total; fprintf (stderr, " large %12lu ", (unsigned long int) large); if (percent == 0) fputs (" <1% \e[41;37m", stderr); else fprintf (stderr, "%3d%% \e[41;37m", percent); percent = (large * 50) / maxcalls; while (percent-- > 0) fputc ('=', stderr); fputs ("\e[0;0m\n", stderr); } /* Any following malloc/free etc. calls should generate statistics again, because otherwise freeing something that has been malloced before this destructor (including struct header in front of it) wouldn't be properly freed. */ not_me = false; }
geminy/aidear
oss/glibc/glibc-2.24/malloc/memusage.c
C
gpl-3.0
28,929
-----------------------------------
-- Area: Tavnazian Safehold
--  NPC: Gennoue
-- Type: Weather Reporter
-----------------------------------
package.loaded["scripts/zones/Tavnazian_Safehold/TextIDs"] = nil;
-----------------------------------
require("scripts/globals/settings");
require("scripts/zones/Tavnazian_Safehold/TextIDs");
-----------------------------------

function onTrade(player,npc,trade)
end;

function onTrigger(player,npc)
    player:startEvent(509,0,0,0,0,0,0,0,VanadielTime());
end;

function onEventUpdate(player,csid,option)
    -- printf("CSID: %u",csid);
    -- printf("RESULT: %u",option);
end;

function onEventFinish(player,csid,option)
    -- printf("CSID: %u",csid);
    -- printf("RESULT: %u",option);
end;
Ninjistix/darkstar
scripts/zones/Tavnazian_Safehold/npcs/Gennoue.lua
Lua
gpl-3.0
743
#!/bin/sh

if [ $# -lt 1 ]; then
cat <<EOF
Usage: blackbox_newuser.sh PREFIX
EOF
exit 1;
fi

PREFIX="$1"
shift 1

. `dirname $0`/subunit.sh

samba_tool="$BINDIR/samba-tool"
samba4bindir="$BINDIR"
samba4srcdir="$SRCDIR/source4"
samba4kinit=kinit
if test -x $BINDIR/samba4kinit; then
    samba4kinit=$BINDIR/samba4kinit
fi

CONFIG="--configfile=$PREFIX/dc/etc/smb.conf"

TESTUSER="ktpassUser"

testit "user add" $samba_tool user add $CONFIG $TESTUSER testp@ssw0Rd || failed=`expr $failed + 1`

KRB5CCNAME="$PREFIX/tmpccache"
export KRB5CCNAME

echo "testp@ssw0Rd" >$PREFIX/tmppassfile

testit "kinit with passwd" $samba4kinit -e arcfour-hmac-md5 --password-file=$PREFIX/tmppassfile $TESTUSER@SAMBA.EXAMPLE.COM || failed=`expr $failed + 1`

testit "ktpass" $samba4srcdir/scripting/bin/ktpass.sh --host LOCALDC --out $PREFIX/testuser.kt --princ $TESTUSER --pass "testp@ssw0Rd" --path-to-ldbsearch=$BINDIR/bin || failed=`expr $failed + 1`

rm -f $KRB5CCNAME

testit "kinit with keytab" $samba4kinit -e arcfour-hmac-md5 --use-keytab -t $PREFIX/testuser.kt $TESTUSER@SAMBA.EXAMPLE.COM || failed=`expr $failed + 1`

rm -f $PREFIX/tmpccache $PREFIX/testuser.kt

exit $failed
hef/samba
testprogs/blackbox/test_ktpass.sh
Shell
gpl-3.0
1,165
/** ****************************************************************************** * * @file treeitem.cpp * @author The OpenPilot Team, http://www.openpilot.org Copyright (C) 2010. * @author Tau Labs, http://taulabs.org, Copyright (C) 2014 * @addtogroup GCSPlugins GCS Plugins * @{ * @addtogroup UAVObjectBrowserPlugin UAVObject Browser Plugin * @{ * @brief The UAVObject Browser gadget plugin *****************************************************************************/ /* * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "treeitem.h" #include "fieldtreeitem.h" #include <math.h> QTime* HighLightManager::m_currentTime = NULL; /* Constructor */ HighLightManager::HighLightManager(long checkingInterval, QTime *currentTime) { // Start the timer and connect it to the callback m_expirationTimer.start(checkingInterval); connect(&m_expirationTimer, SIGNAL(timeout()), this, SLOT(checkItemsExpired())); if (currentTime == NULL) m_currentTime = new QTime; m_currentTime = currentTime; } /* * Called to add item to list. Item is only added if absent. * Returns true if item was added, otherwise false. */ bool HighLightManager::add(TreeItem *itemToAdd) { // Lock to ensure thread safety QMutexLocker locker(&m_listMutex); // Check so that the item isn't already in the list if(!m_itemsList.contains(itemToAdd)) { m_itemsList.append(itemToAdd); return true; } return false; } /* * Called to remove item from list. * Returns true if item was removed, otherwise false. */ bool HighLightManager::remove(TreeItem *itemToRemove) { // Lock to ensure thread safety QMutexLocker locker(&m_listMutex); // Remove item and return result return m_itemsList.removeOne(itemToRemove); } /* * Callback called periodically by the timer. * This method checks for expired highlights and * removes them if they are expired. * Expired highlights are restored. */ void HighLightManager::checkItemsExpired() { // Lock to ensure thread safety QMutexLocker locker(&m_listMutex); // Get a mutable iterator for the list QMutableLinkedListIterator<TreeItem*> iter(m_itemsList); // Loop over all items, check if they expired. while(iter.hasNext()) { TreeItem* item = iter.next(); if(item->getHiglightExpires() < *m_currentTime) { // If expired, call removeHighlight item->removeHighlight(); // Remove from list since it is restored. 
iter.remove(); } } } int TreeItem::m_highlightTimeMs = 500; QTime* TreeItem::m_currentTime = NULL; TreeItem::TreeItem(const QList<QVariant> &data, TreeItem *parent) : QObject(0), isPresentOnHardware(true), m_data(data), m_parent(parent), m_highlight(false), m_changed(false), m_updated(false) { } TreeItem::TreeItem(const QVariant &data, TreeItem *parent) : QObject(0), isPresentOnHardware(true), m_parent(parent), m_highlight(false), m_changed(false), m_updated(false) { m_data << data << "" << ""; } TreeItem::~TreeItem() { qDeleteAll(m_children); } void TreeItem::appendChild(TreeItem *child) { m_children.append(child); child->setParentTree(this); } void TreeItem::removeChild(TreeItem *child) { m_children.removeAll(child); } void TreeItem::insertChild(TreeItem *child) { int index = nameIndex(child->data(0).toString()); m_children.insert(index, child); child->setParentTree(this); } TreeItem *TreeItem::getChild(int index) { return m_children.value(index); } int TreeItem::childCount() const { return m_children.count(); } int TreeItem::row() const { if (m_parent) return m_parent->m_children.indexOf(const_cast<TreeItem*>(this)); return 0; } int TreeItem::columnCount() const { return m_data.count(); } QVariant TreeItem::data(int column) const { return m_data.value(column); } void TreeItem::setData(QVariant value, int column) { m_data.replace(column, value); } void TreeItem::update() { foreach(TreeItem *child, treeChildren()) child->update(); } void TreeItem::apply() { foreach(TreeItem *child, treeChildren()) child->apply(); } /* * Called after a value has changed to trigger highlightning of tree item. */ void TreeItem::setHighlight(bool highlight) { m_highlight = highlight; m_changed = false; if (highlight) { // Update the expires timestamp if (m_currentTime != NULL) m_highlightExpires = m_currentTime->addMSecs(m_highlightTimeMs); else m_highlightExpires = QTime::currentTime().addMSecs(m_highlightTimeMs); // Add to highlightmanager if(m_highlightManager->add(this)) { // Only emit signal if it was added emit updateHighlight(this); } } else if(m_highlightManager->remove(this)) { // Only emit signal if it was removed emit updateHighlight(this); } // If we have a parent, call recursively to update highlight status of parents. // This will ensure that the root of a leaf that is changed also is highlighted. // Only updates that really changes values will trigger highlight of parents. 
if(m_parent) { m_parent->setHighlight(highlight); } } void TreeItem::setUpdatedOnly(bool updated) { if(this->changed() && updated) { m_updated=updated; m_parent->setUpdatedOnlyParent(); } else if(!updated) m_updated=false; foreach(TreeItem * item,this->treeChildren()) { item->setUpdatedOnly(updated); } } void TreeItem::setUpdatedOnlyParent() { FieldTreeItem * field=dynamic_cast<FieldTreeItem*>(this); TopTreeItem * top=dynamic_cast<TopTreeItem*>(this); if(!field && !top) { m_updated=true; m_parent->setUpdatedOnlyParent(); } } void TreeItem::removeHighlight() { m_highlight = false; //update(); emit updateHighlight(this); } void TreeItem::setHighlightManager(HighLightManager *mgr) { m_highlightManager = mgr; } QTime TreeItem::getHiglightExpires() { return m_highlightExpires; } void TreeItem::setCurrentTime(QTime *currentTime) { if (m_currentTime == NULL) m_currentTime = new QTime; m_currentTime = currentTime; } QList<MetaObjectTreeItem *> TopTreeItem::getMetaObjectItems() { return m_metaObjectTreeItemsPerObjectIds.values(); } QList<DataObjectTreeItem *> TopTreeItem::getDataObjectItems() { return m_objectTreeItemsPerObjectIds.values(); }
mluessi/dronin
ground/gcs/src/plugins/uavobjectbrowser/treeitem.cpp
C++
gpl-3.0
7,630
/************************************************************************* * Copyright 2009-2013 Eucalyptus Systems, Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see http://www.gnu.org/licenses/. * * Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta * CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need * additional information or have any questions. ************************************************************************/ package com.eucalyptus.objectstorage.metadata; import java.util.HashMap; import java.util.List; import java.util.NoSuchElementException; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.persistence.EntityTransaction; import org.apache.log4j.Logger; import org.hibernate.Criteria; import org.hibernate.PersistentObjectException; import org.hibernate.criterion.Example; import org.hibernate.criterion.Order; import org.hibernate.criterion.Projections; import org.hibernate.criterion.Restrictions; import org.hibernate.exception.ConstraintViolationException; import com.eucalyptus.entities.Entities; import com.eucalyptus.entities.TransactionException; import com.eucalyptus.entities.TransactionResource; import com.eucalyptus.entities.Transactions; import com.eucalyptus.objectstorage.BucketState; import com.eucalyptus.objectstorage.entities.Bucket; import com.eucalyptus.objectstorage.exceptions.IllegalResourceStateException; import com.eucalyptus.objectstorage.exceptions.MetadataOperationFailureException; import com.eucalyptus.objectstorage.exceptions.NoSuchEntityException; import com.eucalyptus.objectstorage.exceptions.s3.NoSuchBucketException; import com.eucalyptus.objectstorage.exceptions.s3.S3Exception; import com.eucalyptus.objectstorage.util.ObjectStorageProperties.VersioningStatus; import com.eucalyptus.storage.msgs.s3.AccessControlPolicy; import com.google.common.base.Function; import com.google.common.base.Objects; /** * Three types of failures on a metadata operation: IllegalResourceState - entity is not in a state where the update is a valid change (e.g. * deleting->creating) EntityNotFound - the entity is no longer found on the backend (e.g. 
deleted while other update pending) * MetadataOperationFailureException - the operation could not complete, db failure, etc * */ public class DbBucketMetadataManagerImpl implements BucketMetadataManager { private static final Logger LOG = Logger.getLogger(DbBucketMetadataManagerImpl.class); public void start() throws Exception {} public void stop() throws Exception {} @Override public Bucket persistBucketInCreatingState(@Nonnull String bucketName, @Nonnull AccessControlPolicy acp, @Nullable String iamUserId, @Nullable String location) throws IllegalResourceStateException, MetadataOperationFailureException, NoSuchEntityException { Bucket initialized; try { initialized = Bucket.getInitializedBucket(bucketName, iamUserId, acp, location); } catch (Exception e) { throw new MetadataOperationFailureException(e); } return transitionBucketToState(initialized, BucketState.creating); } @Override public Bucket lookupExtantBucket(@Nonnull String bucketName) throws NoSuchEntityException, MetadataOperationFailureException { try { Bucket searchExample = new Bucket(bucketName).withState(BucketState.extant); return Transactions.find(searchExample); } catch (NoSuchElementException e) { throw new NoSuchEntityException(bucketName); } catch (Exception e) { LOG.warn("Error querying bucket existence in db", e); throw new MetadataOperationFailureException(e); } } @Override public Bucket lookupBucket(@Nonnull String bucketName) throws NoSuchEntityException, MetadataOperationFailureException { try { Bucket searchExample = new Bucket(bucketName); return Transactions.find(searchExample); } catch (NoSuchElementException e) { throw new NoSuchEntityException(bucketName); } catch (Exception e) { LOG.warn("Error querying bucket existence in db", e); throw new MetadataOperationFailureException(e); } } @Override public Bucket lookupBucketByUuid(@Nonnull String bucketUuid) throws NoSuchEntityException, MetadataOperationFailureException { try { Bucket searchExample = new Bucket().withUuid(bucketUuid); return Transactions.find(searchExample); } catch (NoSuchElementException e) { throw new NoSuchEntityException(bucketUuid); } catch (Exception e) { LOG.warn("Error querying bucket existence in db", e); throw new MetadataOperationFailureException(e); } } @Override public List<Bucket> getBucketsForDeletion() throws Exception { try { // Rely on uniqueness contraint that only one record can be in // either creating or extant state with that bucket name Bucket searchBucket = new Bucket().withState(BucketState.deleting); return Transactions.findAll(searchBucket); } catch (NoSuchElementException e) { throw e; } catch (Exception e) { LOG.error("Error querying bucket existence in db", e); throw e; } } @Override public Bucket transitionBucketToState(@Nonnull final Bucket bucket, @Nonnull BucketState destState) throws NoSuchEntityException, IllegalResourceStateException, MetadataOperationFailureException { Function<Bucket, Bucket> transitionFunction = null; switch (destState) { case creating: transitionFunction = BucketStateTransitions.TRANSITION_TO_CREATING; break; case extant: transitionFunction = BucketStateTransitions.TRANSITION_TO_EXTANT; break; case deleting: transitionFunction = BucketStateTransitions.TRANSITION_TO_DELETING; break; default: LOG.error("Unexpected destination state: " + destState); throw new IllegalArgumentException(); } try { return Entities.asTransaction(Bucket.class, transitionFunction).apply(bucket); } catch (IllegalResourceStateException e) { throw e; } catch (ConstraintViolationException e) { 
IllegalResourceStateException ex = new IllegalResourceStateException(); ex.initCause(e); throw ex; } catch (PersistentObjectException e) { // Object passed for merge is not found on the db. throw new NoSuchEntityException("Bucket entity not found for merge", e); } catch (Exception e) { throw new MetadataOperationFailureException(e); } } @Override public void deleteBucketMetadata(@Nonnull final Bucket bucket) throws Exception { try (TransactionResource trans = Entities.transactionFor(Bucket.class)) { Bucket bucketToDelete = Entities.uniqueResult(bucket); if (BucketState.deleting.equals(bucketToDelete.getState())) { // Remove the record. Entities.delete(bucketToDelete); } else { throw new IllegalResourceStateException("Bucket not in deleting state, no valid transition to deleted", null, BucketState.deleting.toString(), bucketToDelete.getState().toString()); } trans.commit(); } catch (NoSuchElementException e) { // Ok, continue. LOG.trace("Bucket deletion finalization for (bucket uuid) " + bucket.getBucketUuid() + " failed to find entity record. Returning normally"); } } @Override public List<Bucket> lookupBucketsByOwner(String ownerCanonicalId) throws MetadataOperationFailureException { Bucket searchBucket = new Bucket().withState(BucketState.extant); searchBucket.setOwnerCanonicalId(ownerCanonicalId); List<Bucket> buckets = null; try (TransactionResource trans = Entities.transactionFor(Bucket.class)) { Criteria searchCriteria = Entities.createCriteria(Bucket.class); Example example = Example.create(searchBucket); searchCriteria.add(example); searchCriteria.addOrder(Order.asc("bucketName")); searchCriteria.setReadOnly(true); buckets = searchCriteria.list(); trans.commit(); return buckets; } catch (Exception e) { LOG.error("Error listing buckets for user " + ownerCanonicalId + " due to DB transaction error", e); throw new MetadataOperationFailureException(e); } } @Override public List<Bucket> lookupBucketsByState(BucketState state) throws TransactionException { Bucket searchBucket = new Bucket().withState(state); List<Bucket> buckets = null; try (TransactionResource trans = Entities.transactionFor(Bucket.class)) { Criteria searchCriteria = Entities.createCriteria(Bucket.class); Example example = Example.create(searchBucket); searchCriteria.add(example); searchCriteria.addOrder(Order.asc("bucketName")); searchCriteria.setReadOnly(true); buckets = searchCriteria.list(); trans.commit(); return buckets; } catch (Exception e) { LOG.error("Error listing buckets in the state: " + state + " due to DB transaction error", e); throw e; } } @Override public List<Bucket> lookupBucketsByUser(String userIamId) throws MetadataOperationFailureException { Bucket searchBucket = new Bucket().withState(BucketState.extant); searchBucket.setOwnerIamUserId(userIamId); List<Bucket> buckets = null; try { buckets = Transactions.findAll(searchBucket); return buckets; } catch (TransactionException e) { LOG.error("Error listing buckets for user " + userIamId + " due to DB transaction error", e); throw new MetadataOperationFailureException(e); } } @Override public long countBucketsByUser(String userIamId) throws MetadataOperationFailureException { Bucket searchBucket = new Bucket(); searchBucket.setOwnerIamUserId(userIamId); try (TransactionResource db = Entities.transactionFor(Bucket.class)) { return Entities.count(searchBucket, Restrictions.ne("state", BucketState.deleting), new HashMap<String, String>()); } catch (Exception e) { LOG.warn("Error counting buckets for user " + userIamId + " due to DB transaction error", 
e); throw new MetadataOperationFailureException(e); } } @Override public long countBucketsByAccount(String canonicalId) throws MetadataOperationFailureException { Bucket searchBucket = new Bucket(); searchBucket.setOwnerCanonicalId(canonicalId); try (TransactionResource db = Entities.transactionFor(Bucket.class)) { return Entities.count(searchBucket, Restrictions.ne("state", BucketState.deleting), new HashMap<String, String>()); } catch (Exception e) { LOG.warn("Error counting buckets for account canonicalId " + canonicalId + " due to DB transaction error", e); throw new MetadataOperationFailureException(e); } } /** * For internal use only (copying, etc) * * @param bucketEntity * @param jsonMarshalledAcl * @return * @throws MetadataOperationFailureException * @throws NoSuchEntityException */ protected Bucket setAcp(@Nonnull Bucket bucketEntity, @Nonnull String jsonMarshalledAcl) throws MetadataOperationFailureException, NoSuchEntityException { try (TransactionResource trans = Entities.transactionFor(Bucket.class)) { Bucket bucket = Entities.merge(bucketEntity); bucket.setAcl(jsonMarshalledAcl); trans.commit(); return bucket; } catch (NoSuchElementException e) { throw new NoSuchEntityException(bucketEntity.getBucketName()); } catch (Exception e) { LOG.error("Error updating acl for bucket " + bucketEntity.getBucketName(), e); throw new MetadataOperationFailureException(e); } } @Override public Bucket setAcp(@Nonnull Bucket bucketEntity, @Nonnull AccessControlPolicy acp) throws MetadataOperationFailureException, NoSuchEntityException { try (TransactionResource trans = Entities.transactionFor(Bucket.class)) { Bucket bucket = Entities.merge(bucketEntity); bucket.setAcl(acp); trans.commit(); return bucket; } catch (NoSuchElementException e) { throw new NoSuchEntityException(bucketEntity.getBucketName()); } catch (Exception e) { LOG.error("Error updating acl for bucket " + bucketEntity.getBucketName(), e); throw new MetadataOperationFailureException(e); } } @Override public Bucket setLoggingStatus(@Nonnull Bucket bucketEntity, @Nonnull Boolean loggingEnabled, @Nullable String destBucket, @Nullable String destPrefix) throws TransactionException, S3Exception { EntityTransaction db = Entities.get(Bucket.class); try { Bucket bucket = Entities.uniqueResult(bucketEntity); bucket.setLoggingEnabled(loggingEnabled); bucket.setTargetBucket(destBucket); bucket.setTargetPrefix(destPrefix); db.commit(); return bucket; } catch (NoSuchElementException e) { throw new NoSuchBucketException(bucketEntity.getBucketName()); } catch (TransactionException e) { LOG.error("Transaction error updating acl for bucket " + bucketEntity.getBucketName(), e); throw e; } finally { if (db != null && db.isActive()) { db.rollback(); } } } @Override public Bucket setVersioning(@Nonnull Bucket bucketEntity, @Nonnull VersioningStatus newState) throws IllegalResourceStateException, MetadataOperationFailureException, NoSuchEntityException { try (TransactionResource trans = Entities.transactionFor(Bucket.class)) { Bucket bucket = Entities.uniqueResult(new Bucket().withUuid(bucketEntity.getBucketUuid())); if (VersioningStatus.Disabled.equals(newState)) { // The user cannot ever set 'Disabled'. 
throw new IllegalResourceStateException("Invalid versioning state transition"); } bucket.setVersioning(newState); trans.commit(); return bucket; } catch (NoSuchElementException e) { throw new NoSuchEntityException(bucketEntity.getBucketName()); } catch (TransactionException e) { LOG.error("Transaction error updating versioning state for bucket " + bucketEntity.getBucketName(), e); throw new MetadataOperationFailureException(e); } } @Override public long totalSizeOfAllBuckets() throws MetadataOperationFailureException { long size = -1; try (TransactionResource db = Entities.transactionFor(Bucket.class)) { size = Objects.firstNonNull( (Number) Entities.createCriteria(Bucket.class).setProjection(Projections.sum("bucketSize")).setReadOnly(true).uniqueResult(), 0) .longValue(); db.commit(); } catch (Exception e) { LOG.warn("Error getting buckets cumulative size", e); throw new MetadataOperationFailureException(e); } return size; } }
eethomas/eucalyptus
clc/modules/object-storage/src/main/java/com/eucalyptus/objectstorage/metadata/DbBucketMetadataManagerImpl.java
Java
gpl-3.0
15,289
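A note on usage: DbBucketMetadataManagerImpl above moves bucket records through a small lifecycle (creating, extant, deleting). The sketch below is illustrative only and is not taken from the Eucalyptus sources; it calls just the methods declared in the file above and assumes the Bucket, BucketState, AccessControlPolicy and manager types are already imported from the object-storage modules (their exact import paths are omitted here).

// Hypothetical caller, for illustration. Only the manager methods
// (persistBucketInCreatingState, transitionBucketToState, lookupExtantBucket)
// and BucketState.extant come from the file above; the wrapper class and the
// caller-supplied acp are assumptions.
public class BucketLifecycleSketch {

    private final BucketMetadataManager manager = new DbBucketMetadataManagerImpl();

    public Bucket createBucket(String name, AccessControlPolicy acp,
                               String iamUserId, String location) throws Exception {
        // 1. Insert the record in the 'creating' state.
        Bucket pending = manager.persistBucketInCreatingState(name, acp, iamUserId, location);

        // 2. Once the backend bucket exists, flip the record to 'extant'.
        Bucket live = manager.transitionBucketToState(pending, BucketState.extant);

        // 3. Subsequent lookups only consider records in the 'extant' state.
        return manager.lookupExtantBucket(live.getBucketName());
    }
}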
//===--- CodeCompletionHandler.h - Preprocessor code completion -*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines the CodeCompletionHandler interface, which provides
//  code-completion callbacks for the preprocessor.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LEX_CODECOMPLETIONHANDLER_H
#define LLVM_CLANG_LEX_CODECOMPLETIONHANDLER_H

namespace clang {

class IdentifierInfo;
class MacroInfo;

/// \brief Callback handler that receives notifications when performing code
/// completion within the preprocessor.
class CodeCompletionHandler {
public:
  virtual ~CodeCompletionHandler();

  /// \brief Callback invoked when performing code completion for a preprocessor
  /// directive.
  ///
  /// This callback will be invoked when the preprocessor processes a '#' at the
  /// start of a line, followed by the code-completion token.
  ///
  /// \param InConditional Whether we're inside a preprocessor conditional
  /// already.
  virtual void CodeCompleteDirective(bool InConditional) { }

  /// \brief Callback invoked when performing code completion within a block of
  /// code that was excluded due to preprocessor conditionals.
  virtual void CodeCompleteInConditionalExclusion() { }

  /// \brief Callback invoked when performing code completion in a context
  /// where the name of a macro is expected.
  ///
  /// \param IsDefinition Whether this is the definition of a macro, e.g.,
  /// in a #define.
  virtual void CodeCompleteMacroName(bool IsDefinition) { }

  /// \brief Callback invoked when performing code completion in a preprocessor
  /// expression, such as the condition of an #if or #elif directive.
  virtual void CodeCompletePreprocessorExpression() { }

  /// \brief Callback invoked when performing code completion inside a
  /// function-like macro argument.
  ///
  /// There will be another callback invocation after the macro arguments are
  /// parsed, so this callback should generally be used to note that the next
  /// callback is invoked inside a macro argument.
  virtual void CodeCompleteMacroArgument(IdentifierInfo *Macro,
                                         MacroInfo *MacroInfo,
                                         unsigned ArgumentIndex) { }

  /// \brief Callback invoked when performing code completion in a part of the
  /// file where we expect natural language, e.g., a comment, string, or
  /// #error directive.
  virtual void CodeCompleteNaturalLanguage() { }
};

}

#endif // LLVM_CLANG_LEX_CODECOMPLETIONHANDLER_H
Bootz/multicore-opimization
llvm/tools/clang/include/clang/Lex/CodeCompletionHandler.h
C
gpl-3.0
2,809
<?php // This file is part of Moodle - http://moodle.org/ // // Moodle is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // Moodle is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with Moodle. If not, see <http://www.gnu.org/licenses/>. /** * Module lib related unit tests * * @package core * @category phpunit * @copyright 2016 Juan Leyva * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later */ defined('MOODLE_INTERNAL') || die(); global $CFG; require_once($CFG->dirroot . '/course/lib.php'); require_once($CFG->dirroot . '/course/modlib.php'); class core_course_modlib_testcase extends advanced_testcase { /** * Test prepare_new_moduleinfo_data */ public function test_prepare_new_moduleinfo_data() { global $DB; $this->resetAfterTest(true); $this->setAdminUser(); $course = self::getDataGenerator()->create_course(); $coursecontext = context_course::instance($course->id); // Test with a complex module, like assign. $assignmodule = $DB->get_record('modules', array('name' => 'assign'), '*', MUST_EXIST); $sectionnumber = 1; list($module, $context, $cw, $cm, $data) = prepare_new_moduleinfo_data($course, $assignmodule->name, $sectionnumber); $this->assertEquals($assignmodule, $module); $this->assertEquals($coursecontext, $context); $this->assertNull($cm); // Not cm yet. $expecteddata = new stdClass(); $expecteddata->section = $sectionnumber; $expecteddata->visible = 1; $expecteddata->course = $course->id; $expecteddata->module = $module->id; $expecteddata->modulename = $module->name; $expecteddata->groupmode = $course->groupmode; $expecteddata->groupingid = $course->defaultgroupingid; $expecteddata->id = ''; $expecteddata->instance = ''; $expecteddata->coursemodule = ''; $expecteddata->advancedgradingmethod_submissions = ''; // Not grading methods enabled by default. $expecteddata->completion = 0; // Unset untestable. unset($data->introeditor); unset($data->_advancedgradingdata); $this->assertEquals($expecteddata, $data); // Create a viewer user. Not able to edit. $viewer = self::getDataGenerator()->create_user(); $this->getDataGenerator()->enrol_user($viewer->id, $course->id); $this->setUser($viewer); $this->expectException('required_capability_exception'); prepare_new_moduleinfo_data($course, $assignmodule->name, $sectionnumber); } /** * Test get_moduleinfo_data */ public function test_get_moduleinfo_data() { global $DB; $this->resetAfterTest(true); $this->setAdminUser(); $course = self::getDataGenerator()->create_course(); $assignmodule = $DB->get_record('modules', array('name' => 'assign'), '*', MUST_EXIST); $assign = self::getDataGenerator()->create_module('assign', array('course' => $course->id)); $assigncm = get_coursemodule_from_id('assign', $assign->cmid); $assigncontext = context_module::instance($assign->cmid); list($cm, $context, $module, $data, $cw) = get_moduleinfo_data($assigncm, $course); $this->assertEquals($assigncm, $cm); $this->assertEquals($assigncontext, $context); $this->assertEquals($assignmodule, $module); // Prepare expected data. 
$expecteddata = clone $assign; $expecteddata->coursemodule = $assigncm->id; $expecteddata->section = $cw->section; $expecteddata->visible = $assigncm->visible; $expecteddata->visibleoncoursepage = $assigncm->visibleoncoursepage; $expecteddata->cmidnumber = $assigncm->idnumber; $expecteddata->groupmode = groups_get_activity_groupmode($cm); $expecteddata->groupingid = $assigncm->groupingid; $expecteddata->course = $course->id; $expecteddata->module = $module->id; $expecteddata->modulename = $module->name; $expecteddata->instance = $assigncm->instance; $expecteddata->completion = $assigncm->completion; $expecteddata->completionview = $assigncm->completionview; $expecteddata->completionexpected = $assigncm->completionexpected; $expecteddata->completionusegrade = is_null($assigncm->completiongradeitemnumber) ? 0 : 1; $expecteddata->completionpassgrade = $assigncm->completionpassgrade; $expecteddata->completiongradeitemnumber = null; $expecteddata->showdescription = $assigncm->showdescription; $expecteddata->tags = core_tag_tag::get_item_tags_array('core', 'course_modules', $assigncm->id); $expecteddata->availabilityconditionsjson = null; $expecteddata->advancedgradingmethod_submissions = null; if ($items = grade_item::fetch_all(array('itemtype' => 'mod', 'itemmodule' => 'assign', 'iteminstance' => $assign->id, 'courseid' => $course->id))) { // set category if present $gradecat = false; foreach ($items as $item) { if ($gradecat === false) { $gradecat = $item->categoryid; continue; } if ($gradecat != $item->categoryid) { //mixed categories $gradecat = false; break; } } if ($gradecat !== false) { // do not set if mixed categories present $expecteddata->gradecat = $gradecat; } } $expecteddata->gradepass = '0.00'; $expecteddata->completionpassgrade = $assigncm->completionpassgrade; // Unset untestable. unset($expecteddata->cmid); unset($data->introeditor); unset($data->_advancedgradingdata); $this->assertEquals($expecteddata, $data); // Create a viewer user. Not able to edit. $viewer = self::getDataGenerator()->create_user(); $this->getDataGenerator()->enrol_user($viewer->id, $course->id); $this->setUser($viewer); $this->expectException('required_capability_exception'); get_moduleinfo_data($assigncm, $course); } }
michael-milette/moodle
course/tests/modlib_test.php
PHP
gpl-3.0
6,998
/* -*- c++ -*- */
/*
 * Copyright 2007 Free Software Foundation, Inc.
 *
 * This file is part of GNU Radio
 *
 * GNU Radio is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3, or (at your option)
 * any later version.
 *
 * GNU Radio is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <mb_gettid.h>

#define NEED_STUB

#if defined(HAVE_SYS_SYSCALL_H) && defined(HAVE_UNISTD_H)
#include <sys/syscall.h>
#include <unistd.h>

#if defined(SYS_gettid)
#undef NEED_STUB
int
mb_gettid()
{
  return syscall(SYS_gettid);
}
#endif
#endif

#if defined(NEED_STUB)
int
mb_gettid()
{
  return 0;
}
#endif
GREO/GNU-Radio
mblock/src/lib/mb_gettid.cc
C++
gpl-3.0
1,184
package com.github.bordertech.wcomponents; import com.github.bordertech.wcomponents.util.ComparableComparator; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.beanutils.PropertyUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * A simple tree based table data model which that takes in root node and an array of bean properties in its * constructor. Note that use of this data model is discouraged, as the table data will be stored in the user's session. * * @author Yiannis Paschalidis * @since 1.0.0 * * @deprecated Use {@link WTable} and {@link SimpleBeanBoundTableModel} instead. */ @Deprecated public class SimpleBeanTreeTableDataModel extends AbstractTreeTableDataModel { /** * The logger instance for this class. */ private static final Log LOG = LogFactory.getLog(SimpleBeanTreeTableDataModel.class); /** * A simple comparator that compares comparables, for use in sorting e.g. columns containing Strings. */ public static final ComparableComparator COMPARABLE_COMPARATOR = new ComparableComparator(); /** * The comparators used for sorting, keyed by column index. */ private Map<Integer, Comparator<Object>> comparators; /** * Indicates whether this model is globally editable. */ private boolean editable; /** * The bean properties for each column. */ private final String[] properties; /** * Creates a SimpleBeanBasedTableDataModel containing the given data. * * @param properties the bean properties for each column. * @param root the tree root node. */ public SimpleBeanTreeTableDataModel(final String[] properties, final TableTreeNode root) { super(root); this.properties = properties; } /** * {@inheritDoc} */ @Override public boolean isSortable(final int col) { return comparators != null && comparators.containsKey(col); } /** * Sets the comparator for the given column, to enable sorting. * * @param col the column to set the comparator on. * @param comparator the comparator to set. */ public void setComparator(final int col, final Comparator comparator) { synchronized (this) { if (comparators == null) { comparators = new HashMap<>(); } } if (comparator != null) { comparators.put(col, comparator); } else { comparators.remove(col); } } /** * {@inheritDoc} */ @Override public Object getValueAt(final TableTreeNode row, final int col) { Object bean = row.getData(); String property = properties[col]; if (bean != null) { if (".".equals(property)) { return bean; } else { try { return PropertyUtils.getProperty(bean, property); } catch (Exception e) { LOG.error("Failed to read bean property " + property + " from " + bean, e); } } } return null; } /** * Indicates whether the given cell is editable. This model only supports editability at a global level. See * {@link #setEditable(boolean)}. * * @param row ignored. * @param col ignored. * @return true if the table is globally editable, false otherwise. 
*/ @Override public boolean isCellEditable(final int row, final int col) { return editable; } /** * {@inheritDoc} */ @Override public void setValueAt(final Object value, final int row, final int col) { if (!editable) { throw new IllegalStateException("Attempted to set a value on an uneditable model"); } TableTreeNode node = getNodeAtLine(row); Object bean = node.getData(); String property = properties[col]; if (bean != null) { if (".".equals(property)) { LOG.error("Set of entire bean is not supported by this model"); //node.setData(value); } else { try { PropertyUtils.setProperty(bean, property, value); } catch (Exception e) { LOG.error("Failed to set bean property " + property + " on " + bean, e); } } } } /** * Indicates whether the data in this model is editable. * * @return true if the data in this model is editable, false otherwise. */ public boolean isEditable() { return editable; } /** * Sets whether the data in this model is editable. By default, the data is not editable. * * @param editable true if the data is editable, false if it is read-only. */ public void setEditable(final boolean editable) { this.editable = editable; } /** * {@inheritDoc} */ @Override public int[] sort(final int col, final boolean ascending) { if (!isSortable(col)) { throw new IllegalStateException("Attempted to sort on column " + col + ", which is not sortable"); } // Obtains the list of top level nodes, sorts them & re-add them in order TableTreeNode root = getRootNode(); List<TableTreeNode> topLevelNodes = new ArrayList<>(root.getChildCount()); for (int i = 0; i < root.getChildCount(); i++) { topLevelNodes.add((TableTreeNode) root.getChildAt(i)); } Comparator<TableTreeNode> comp = new Comparator<TableTreeNode>() { @Override public int compare(final TableTreeNode obj1, final TableTreeNode obj2) { Comparator backing = comparators.get(col); return backing.compare(obj1.getData(), obj2.getData()); } }; if (ascending) { Collections.sort(topLevelNodes, comp); } else { Collections.sort(topLevelNodes, Collections.reverseOrder(comp)); } root.removeAll(); for (TableTreeNode node : topLevelNodes) { root.add(node); } return null; } }
marksreeves/wcomponents
wcomponents-core/src/main/java/com/github/bordertech/wcomponents/SimpleBeanTreeTableDataModel.java
Java
gpl-3.0
5,524
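The deprecated model above is driven entirely by bean property paths and a tree of TableTreeNode rows. A minimal wiring sketch follows; it assumes a TableTreeNode tree has already been built elsewhere (node construction is not shown in this file) and uses the placeholder property names "name" and "size":

import com.github.bordertech.wcomponents.SimpleBeanTreeTableDataModel;
import com.github.bordertech.wcomponents.TableTreeNode;

// Illustrative only; not part of the WComponents sources.
public final class TreeTableModelSketch {

    private TreeTableModelSketch() {
    }

    /** Builds a model whose columns read the "name" and "size" bean properties of each node's data object. */
    public static SimpleBeanTreeTableDataModel buildModel(final TableTreeNode root) {
        SimpleBeanTreeTableDataModel model =
                new SimpleBeanTreeTableDataModel(new String[]{"name", "size"}, root);

        // Make column 0 sortable using the natural ordering of its values.
        model.setComparator(0, SimpleBeanTreeTableDataModel.COMPARABLE_COMPARATOR);

        // Editability is only supported globally on this model.
        model.setEditable(true);
        return model;
    }
}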
/* YUI 3.8.0 (build 5744) Copyright 2012 Yahoo! Inc. All rights reserved. Licensed under the BSD License. http://yuilibrary.com/license/ */ YUI.add("view-node-map",function(e,t){function i(){}var n=e.namespace("View._buildCfg"),r={};n.aggregates||(n.aggregates=[]),n.aggregates.push("getByNode"),i.getByNode=function(t){var n;return e.one(t).ancestor(function(t){return(n=r[e.stamp(t,!0)])||!1},!0),n||null},i._instances=r,i.prototype={initializer:function(){r[e.stamp(this.get("container"))]=this},destructor:function(){var t=e.stamp(this.get("container"),!0);t in r&&delete r[t]}},e.View.NodeMap=i},"3.8.0",{requires:["view"]});
relipse/cworklog
public_html/js/yui/3.8.0/build/view-node-map/view-node-map-min.js
JavaScript
gpl-3.0
631
#ifndef INC_EXEC_READDATA_H
#define INC_EXEC_READDATA_H
#include "Exec.h"
class Exec_ReadData : public Exec {
  public:
    Exec_ReadData() : Exec(GENERAL) {}
    void Help() const;
    DispatchObject* Alloc() const { return (DispatchObject*)new Exec_ReadData(); }
    RetType Execute(CpptrajState&, ArgList&);
};
#endif
hainm/cpptraj
src/Exec_ReadData.h
C
gpl-3.0
321
<?php /* Copyright (C) 2011 Dimitri Mouillard <dmouillard@teclib.com> * Copyright (C) 2013-2015 Laurent Destailleur <eldy@users.sourceforge.net> * Copyright (C) 2012-2014 Regis Houssin <regis.houssin@capnetworks.com> * Copyright (C) 2015-2016 Alexandre Spangaro <aspangaro.dolibarr@gmail.com> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ /** * \file htdocs/hrm/index.php * \ingroup hrm * \brief Home page for HRM area. */ require('../main.inc.php'); require_once DOL_DOCUMENT_ROOT.'/core/class/html.form.class.php'; require_once DOL_DOCUMENT_ROOT.'/core/class/html.formother.class.php'; require_once DOL_DOCUMENT_ROOT.'/core/lib/date.lib.php'; require_once DOL_DOCUMENT_ROOT.'/user/class/user.class.php'; require_once DOL_DOCUMENT_ROOT.'/user/class/usergroup.class.php'; require_once DOL_DOCUMENT_ROOT.'/core/lib/functions2.lib.php'; require_once DOL_DOCUMENT_ROOT.'/core/lib/usergroups.lib.php'; if ($conf->deplacement->enabled) require_once DOL_DOCUMENT_ROOT.'/compta/deplacement/class/deplacement.class.php'; if ($conf->expensereport->enabled) require_once DOL_DOCUMENT_ROOT.'/expensereport/class/expensereport.class.php'; require_once DOL_DOCUMENT_ROOT.'/holiday/class/holiday.class.php'; $langs->load('users'); $langs->load('holidays'); $langs->load('trips'); $socid=GETPOST("socid"); // Protection if external user if ($user->societe_id > 0) accessforbidden(); /* * Actions */ // None /* * View */ $holiday = new Holiday($db); $holidaystatic=new Holiday($db); // Update sold if (! empty($conf->holiday->enabled)) { $result = $holiday->updateBalance(); } $childids = $user->getAllChildIds(); $childids[]=$user->id; llxHeader('', $langs->trans('HRMArea')); print load_fiche_titre($langs->trans("HRMArea"),'', 'title_hrm.png'); if (empty($conf->global->MAIN_INFO_SOCIETE_NOM) || empty($conf->global->MAIN_INFO_SOCIETE_COUNTRY)) $setupcompanynotcomplete=1; if (! empty($setupcompanynotcomplete)) { $langs->load("errors"); $warnpicto=img_warning($langs->trans("WarningMandatorySetupNotComplete")); print '<br><div class="warning"><a href="'.DOL_URL_ROOT.'/admin/company.php?mainmenu=home'.(empty($setupcompanynotcomplete)?'':'&action=edit').'">'.$warnpicto.' '.$langs->trans("WarningMandatorySetupNotComplete").'</a></div>'; exit; } print '<div class="fichecenter"><div class="fichethirdleft">'; if (! empty($conf->global->MAIN_SEARCH_FORM_ON_HOME_AREAS)) // This is useless due to the global search combo { if (! empty($conf->holiday->enabled) && $user->rights->holiday->read) { $langs->load("holiday"); $listofsearchfields['search_holiday']=array('text'=>'TitreRequestCP'); } if (! empty($conf->deplacement->enabled) && $user->rights->deplacement->lire) { $langs->load("trips"); $listofsearchfields['search_deplacement']=array('text'=>'ExpenseReport'); } if (! 
empty($conf->expensereport->enabled) && $user->rights->expensereport->lire) { $langs->load("trips"); $listofsearchfields['search_expensereport']=array('text'=>'ExpenseReport'); } if (count($listofsearchfields)) { print '<form method="post" action="'.DOL_URL_ROOT.'/core/search.php">'; print '<input type="hidden" name="token" value="'.$_SESSION['newtoken'].'">'; print '<table class="noborder nohover centpercent">'; $i=0; foreach($listofsearchfields as $key => $value) { if ($i == 0) print '<tr class="liste_titre"><td colspan="3">'.$langs->trans("Search").'</td></tr>'; print '<tr '.$bc[false].'>'; print '<td class="nowrap"><label for="'.$key.'">'.$langs->trans($value["text"]).'</label></td><td><input type="text" class="flat inputsearch" name="'.$key.'" id="'.$key.'" size="18"></td>'; if ($i == 0) print '<td rowspan="'.count($listofsearchfields).'"><input type="submit" value="'.$langs->trans("Search").'" class="button"></td>'; print '</tr>'; $i++; } print '</table>'; print '</form>'; print '<br>'; } } if (! empty($conf->holiday->enabled)) { if (empty($conf->global->HOLIDAY_HIDE_BALANCE)) { $user_id = $user->id; print '<table class="noborder nohover" width="100%">'; print '<tr class="liste_titre"><th colspan="3">'.$langs->trans("Holidays").'</th></tr>'; print "<tr ".$bc[0].">"; print '<td colspan="3">'; $out=''; $typeleaves=$holiday->getTypes(1,1); foreach($typeleaves as $key => $val) { $nb_type = $holiday->getCPforUser($user->id, $val['rowid']); $nb_holiday += $nb_type; $out .= ' - '.$val['label'].': <strong>'.($nb_type?price2num($nb_type):0).'</strong><br>'; } print $langs->trans('SoldeCPUser', round($nb_holiday,5)).'<br>'; print $out; print '</td>'; print '</tr>'; print '</table><br>'; } elseif (! is_numeric($conf->global->HOLIDAY_HIDE_BALANCE)) { print $langs->trans($conf->global->HOLIDAY_HIDE_BALANCE).'<br>'; } } print '</div><div class="fichetwothirdright"><div class="ficheaddleft">'; $max=10; $langs->load("boxes"); // Latest leave requests if (! empty($conf->holiday->enabled) && $user->rights->holiday->read) { $sql = "SELECT u.rowid as uid, u.lastname, u.firstname, u.login, u.email, u.photo, u.statut, x.rowid, x.rowid as ref, x.fk_type, x.date_debut as date_start, x.date_fin as date_end, x.halfday, x.tms as dm, x.statut as status"; $sql.= " FROM ".MAIN_DB_PREFIX."holiday as x, ".MAIN_DB_PREFIX."user as u"; $sql.= " WHERE u.rowid = x.fk_user"; $sql.= " AND x.entity = ".$conf->entity; if (empty($user->rights->holiday->read_all)) $sql.=' AND x.fk_user IN ('.join(',',$childids).')'; //if (!$user->rights->societe->client->voir && !$user->societe_id) $sql.= " AND x.fk_soc = s. 
rowid AND s.rowid = sc.fk_soc AND sc.fk_user = " .$user->id; //if (!empty($socid)) $sql.= " AND x.fk_soc = ".$socid; $sql.= $db->order("x.tms","DESC"); $sql.= $db->plimit($max, 0); $result = $db->query($sql); if ($result) { $var=false; $num = $db->num_rows($result); $holidaystatic=new Holiday($db); $userstatic=new User($db); $listhalfday=array('morning'=>$langs->trans("Morning"),"afternoon"=>$langs->trans("Afternoon")); $typeleaves=$holidaystatic->getTypes(1,-1); $i = 0; print '<div class="div-table-responsive">'; print '<table class="noborder" width="100%">'; print '<tr class="liste_titre">'; print '<th colspan="3">'.$langs->trans("BoxTitleLastLeaveRequests",min($max,$num)).'</th>'; print '<th>'.$langs->trans("from").'</th>'; print '<th>'.$langs->trans("to").'</th>'; print '<th align="right">'.$langs->trans("DateModificationShort").'</th>'; print '<th width="16">&nbsp;</th>'; print '</tr>'; if ($num) { while ($i < $num && $i < $max) { $obj = $db->fetch_object($result); $holidaystatic->id=$obj->rowid; $holidaystatic->ref=$obj->ref; $userstatic->id=$obj->uid; $userstatic->lastname=$obj->lastname; $userstatic->firstname=$obj->firstname; $userstatic->login=$obj->login; $userstatic->photo=$obj->photo; $userstatic->email=$obj->email; $userstatic->statut=$obj->statut; print '<tr class="oddeven">'; print '<td>'.$holidaystatic->getNomUrl(1).'</td>'; print '<td>'.$userstatic->getNomUrl(-1, 'leave').'</td>'; print '<td>'.$typeleaves[$obj->fk_type]['label'].'</td>'; $starthalfday=($obj->halfday == -1 || $obj->halfday == 2)?'afternoon':'morning'; $endhalfday=($obj->halfday == 1 || $obj->halfday == 2)?'morning':'afternoon'; print '<td>'.dol_print_date($obj->date_start,'day').' '.$langs->trans($listhalfday[$starthalfday]); print '<td>'.dol_print_date($obj->date_end,'day').' '.$langs->trans($listhalfday[$endhalfday]); print '<td align="right">'.dol_print_date($db->jdate($obj->dm),'day').'</td>'; print '<td>'.$holidaystatic->LibStatut($obj->status,3).'</td>'; print '</tr>'; $i++; } } else { print '<tr class="oddeven"><td colspan="7" class="opacitymedium">'.$langs->trans("None").'</td></tr>'; } print '</table></div><br>'; } else dol_print_error($db); } // Last expense report (old module) if (! empty($conf->deplacement->enabled) && $user->rights->deplacement->lire) { $sql = "SELECT u.rowid as uid, u.lastname, u.firstname, u.login, u.email, u.statut, u.photo, d.rowid, d.dated as date, d.tms as dm, d.km, d.fk_statut"; $sql.= " FROM ".MAIN_DB_PREFIX."deplacement as d, ".MAIN_DB_PREFIX."user as u"; if (!$user->rights->societe->client->voir && !$user->societe_id) $sql.= ", ".MAIN_DB_PREFIX."societe as s, ".MAIN_DB_PREFIX."societe_commerciaux as sc"; $sql.= " WHERE u.rowid = d.fk_user"; $sql.= " AND d.entity = ".$conf->entity; if (empty($user->rights->deplacement->readall) && empty($user->rights->deplacement->lire_tous)) $sql.=' AND d.fk_user IN ('.join(',',$childids).')'; if (!$user->rights->societe->client->voir && !$user->societe_id) $sql.= " AND d.fk_soc = s. 
rowid AND s.rowid = sc.fk_soc AND sc.fk_user = " .$user->id; if (!empty($socid)) $sql.= " AND d.fk_soc = ".$socid; $sql.= $db->order("d.tms","DESC"); $sql.= $db->plimit($max, 0); $result = $db->query($sql); if ($result) { $var=false; $num = $db->num_rows($result); $i = 0; print '<div class="div-table-responsive">'; print '<table class="noborder" width="100%">'; print '<tr class="liste_titre">'; print '<th colspan="2">'.$langs->trans("BoxTitleLastModifiedExpenses",min($max,$num)).'</th>'; print '<th align="right">'.$langs->trans("FeesKilometersOrAmout").'</th>'; print '<th align="right">'.$langs->trans("DateModificationShort").'</th>'; print '<th width="16">&nbsp;</th>'; print '</tr>'; if ($num) { $total_ttc = $totalam = $total = 0; $deplacementstatic=new Deplacement($db); $userstatic=new User($db); while ($i < $num && $i < $max) { $obj = $db->fetch_object($result); $deplacementstatic->ref=$obj->rowid; $deplacementstatic->id=$obj->rowid; $userstatic->id=$obj->uid; $userstatic->lastname=$obj->lastname; $userstatic->firstname=$obj->firstname; $userstatic->login=$obj->login; $userstatic->email=$obj->email; $userstatic->statut=$obj->statut; $userstatic->photo=$obj->photo; print '<tr class="oddeven">'; print '<td>'.$deplacementstatic->getNomUrl(1).'</td>'; print '<td>'.$userstatic->getNomUrl(-1).'</td>'; print '<td align="right">'.$obj->km.'</td>'; print '<td align="right">'.dol_print_date($db->jdate($obj->dm),'day').'</td>'; print '<td>'.$deplacementstatic->LibStatut($obj->fk_statut,3).'</td>'; print '</tr>'; $i++; } } else { print '<tr class="oddeven"><td colspan="5" class="opacitymedium">'.$langs->trans("None").'</td></tr>'; } print '</table>'; print '</div>'; } else dol_print_error($db); } // Last expense report (new module) if (! empty($conf->expensereport->enabled) && $user->rights->expensereport->lire) { $sql = "SELECT u.rowid as uid, u.lastname, u.firstname, u.login, u.email, u.statut, u.photo, x.rowid, x.ref, x.date_debut as date, x.tms as dm, x.total_ttc, x.fk_statut as status"; $sql.= " FROM ".MAIN_DB_PREFIX."expensereport as x, ".MAIN_DB_PREFIX."user as u"; if (!$user->rights->societe->client->voir && !$user->societe_id) $sql.= ", ".MAIN_DB_PREFIX."societe as s, ".MAIN_DB_PREFIX."societe_commerciaux as sc"; $sql.= " WHERE u.rowid = x.fk_user_author"; $sql.= " AND x.entity = ".$conf->entity; if (empty($user->rights->expensereport->readall) && empty($user->rights->expensereport->lire_tous)) $sql.=' AND x.fk_user_author IN ('.join(',',$childids).')'; //if (!$user->rights->societe->client->voir && !$user->societe_id) $sql.= " AND x.fk_soc = s. 
rowid AND s.rowid = sc.fk_soc AND sc.fk_user = " .$user->id; //if (!empty($socid)) $sql.= " AND x.fk_soc = ".$socid; $sql.= $db->order("x.tms","DESC"); $sql.= $db->plimit($max, 0); $result = $db->query($sql); if ($result) { $var=false; $num = $db->num_rows($result); $i = 0; print '<div class="div-table-responsive">'; print '<table class="noborder" width="100%">'; print '<tr class="liste_titre">'; print '<th colspan="2">'.$langs->trans("BoxTitleLastModifiedExpenses",min($max,$num)).'</th>'; print '<th align="right">'.$langs->trans("TotalTTC").'</th>'; print '<th align="right">'.$langs->trans("DateModificationShort").'</th>'; print '<th width="16">&nbsp;</th>'; print '</tr>'; if ($num) { $total_ttc = $totalam = $total = 0; $expensereportstatic=new ExpenseReport($db); $userstatic=new User($db); while ($i < $num && $i < $max) { $obj = $db->fetch_object($result); $expensereportstatic->id=$obj->rowid; $expensereportstatic->ref=$obj->ref; $userstatic->id=$obj->uid; $userstatic->lastname=$obj->lastname; $userstatic->firstname=$obj->firstname; $userstatic->email=$obj->email; $userstatic->login=$obj->login; $userstatic->statut=$obj->statut; $userstatic->photo=$obj->photo; print '<tr class="oddeven">'; print '<td>'.$expensereportstatic->getNomUrl(1).'</td>'; print '<td>'.$userstatic->getNomUrl(-1).'</td>'; print '<td align="right">'.price($obj->total_ttc).'</td>'; print '<td align="right">'.dol_print_date($db->jdate($obj->dm),'day').'</td>'; print '<td>'.$expensereportstatic->LibStatut($obj->status,3).'</td>'; print '</tr>'; $i++; } } else { print '<tr class="oddeven"><td colspan="5" class="opacitymedium">'.$langs->trans("None").'</td></tr>'; } print '</table>'; print '</div>'; } else dol_print_error($db); } print '</div></div></div>'; llxFooter(); $db->close();
All-3kcis/dolibarr
htdocs/hrm/index.php
PHP
gpl-3.0
14,703
def load_config(default_values, user_values):
    """Merge user-supplied settings over the defaults.

    Nested dicts are merged one level deep: a default key is kept when the
    user dict omits it or maps it to an empty string.
    """
    if user_values is None:
        return default_values
    config = {}
    for k, v in user_values.items():
        if k in default_values:
            if isinstance(v, dict):
                cloned = user_values[k].copy()
                for key, value in default_values[k].items():
                    # Fall back to the default when the user dict is missing
                    # the key, or maps it to an empty string.
                    if (key is not None and key not in user_values[k]) \
                            or user_values[k][key] == '':
                        cloned[key] = value
                config[k] = cloned
            else:
                config[k] = v
        else:
            config[k] = v
    # Carry over any defaults the user did not mention at all.
    for k, v in default_values.items():
        if k not in config:
            config[k] = v
    return config


def import_class(full_path):
    """Import and return the class named by a dotted path such as 'pkg.mod.Class'."""
    path_split = full_path.split('.')
    path = ".".join(path_split[:-1])
    klass = path_split[-1:]
    mod = __import__(path, fromlist=[klass])
    return getattr(mod, klass[0])
CoderBotOrg/coderbotsrv
server/lib/engineauth/utils.py
Python
gpl-3.0
948
<!DOCTYPE html>
<!-- fieldset with one invalid element and a barred for constraint validation element -->
<html>
  <head>
    <style>
      fieldset:invalid { display: none; }
    </style>
  </head>
  <body>
    <fieldset id="fieldset">
      <input required>
      <input id='i' value='foo' readonly required>
    </fieldset>
  </body>
</html>
Yukarumya/Yukarum-Redfoxes
layout/reftests/css-invalid/fieldset/fieldset-invalid-and-barred.html
HTML
mpl-2.0
337
<!DOCTYPE HTML>
<html>
<head>
</head>
<body>
  <iframe id="content"></iframe>
</body>
</html>
vladikoff/fxa-mochitest
tests/mochitest/chrome/layout/forms/test/bug536567_iframe.html
HTML
mpl-2.0
114
<html> <head> <meta HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> <meta HTTP-EQUIV="Content-Style-Type" CONTENT="text/css"> <link rel="stylesheet" href="../../../foundrydoc.css" type="text/css" charset="ISO-8859-1"> <link rel="stylesheet" href="foundrydoc.css" type="text/css" charset="ISO-8859-1"> </head> <body> <h1>Name</h1> <b>VixHost_Connect</b> <h1>Description</h1> <pre> VixHandle VixHost_Connect(int apiVersion, VixServiceProvider hostType, const char *hostName, int hostPort, const char *userName, const char *password, VixHostOptions options, VixHandle propertyListHandle, VixEventProc *callbackProc, void *clientData); </pre> <p> Creates a host handle. This handle cannot be shared or reused after disconnect. <h1>Parameters</h1> <dl> <dt><i>apiVersion</i></dt> <dd> Must be VIX_API_VERSION. </dd> <dt><i>hostType</i></dt> <dd> With vCenter Server, ESX/ESXi hosts, and VMware Server 2.0, use VIX_SERVICEPROVIDER_VMWARE_VI_SERVER. With VMware Workstation, use VIX_SERVICEPROVIDER_VMWARE_WORKSTATION. With VMware Workstation (shared mode), use VIX_SERVICEPROVIDER_VMWARE_WORKSTATION_SHARED. With VMware Player, use VIX_SERVICEPROVIDER_VMWARE_PLAYER. With VMware Server 1.0.x, use VIX_SERVICEPROVIDER_VMWARE_SERVER. </dd> <dt><i>hostName</i></dt> <dd> Varies by product platform. With vCenter Server, ESX/ESXi hosts, VMware Workstation (shared mode) and VMware Server 2.0, use a URL of the form "https://&lt;hostName&gt;:&lt;port&gt;/sdk" where &lt;hostName&gt; is either the DNS name or IP address. If missing, &lt;port&gt; may default to 443 (see Remarks below). In VIX API 1.10 and later, you can omit "https://" and "/sdk" specifying just the DNS name or IP address. Credentials are required even for connections made locally. With Workstation, use NULL to connect to the local host. With VMware Server 1.0.x, use the DNS name or IP address for remote connections, or the same as Workstation for local connections. </dd> <dt><i>hostPort</i></dt> <dd> TCP/IP port on the remote host. With VMware Workstation and VMware Player, use zero for the local host. With ESX/ESXi hosts, VMware Workstation (shared mode) and VMware Server 2.0 you specify port number within the hostName parameter, so this parameter is ignored (see Remarks below). </dd> <dt><i>login</i></dt> <dd> Username for authentication on the remote machine. With VMware Workstation, VMware Player, and VMware Server 1.0.x, use NULL to authenticate as the current user on local host. With vCenter Server, ESX/ESXi hosts, VMware Workstation (shared mode) and VMware Server 2.0, you must use a valid login. </dd> <dt><i>password</i></dt> <dd> Password for authentication on the remote machine. With VMware Workstation, VMware Player, and VMware Server 1.0.x, use NULL to authenticate as the current user on local host. With ESX/ESXi, VMware Workstation (shared mode) and VMware Server 2.0, you must use a valid login. </dd> <dt><i>options</i></dt> <dd> Should be zero. The option VIX_HOSTOPTION_USE_EVENT_PUMP has been deprecated and may be removed from future versions of the VIX API. </dd> <dt><i>propertyListHandle</i></dt> <dd> Must be VIX_INVALID_HANDLE. </dd> <dt><i>callbackProc</i></dt> <dd> Optional callback of type VixEventProc. </dd> <dt><i>clientData</i></dt> <dd> Optional user supplied opaque data to be passed to optional callback. </dd> </dl> <h1>Return Value</h1> A job handle. When the job completes, retrieve the Host handle from the job handle using the VIX_PROPERTY_JOB_RESULT_HANDLE property. 
<h1>Remarks</h1> <ul> <li> To specify the local host (where the API client runs) with VMware Workstation and VMware Player, pass null values for the hostName, hostPort, userName, and password parameters. <li> With vCenter Server, ESX/ESXi hosts, and VMware Server 2.0, the URL for the hostName argument may specify the port. Otherwise an HTTPS connection is attempted on port 443. HTTPS is strongly recommended. Port numbers are set during installation of Server 2.0. The installer's default HTTP and HTTPS values are 8222 and 8333 for Server on Windows, or (if not already in use) 80 and 443 for Server on Linux, and 902 for the automation socket, authd. If connecting to a virtual machine though a firewall, port 902 and the communicating port must be opened to allow guest operations. <li> If a VMware ESX host is being managed by a VMware VCenter Server, you should call VixHost_Connect with the hostname or IP address of the VCenter server, not the ESX host. Connecting directly to an ESX host while bypassing its VCenter Server can cause state inconsistency. <li> On Windows, this function should not be called multiple times with different service providers in the same process; doing so will result in a VIX_E_WRAPPER_MULTIPLE_SERVICEPROVIDERS error. A single client process can connect to multiple hosts as long as it connects using the same service provider type. <li> To enable SSL certificate verification, set the value of the options parameter to include the bit flag specified by VIX_HOSTOPTION_VERIFY_SSL_CERT. This option can also be set in the VMware config file by assigning vix.enableSslCertificateCheck as TRUE or FALSE. The vix.sslCertificateFile config option specifies the path to a file containing CA certificates in PEM format. The vix.sslCertificateDirectory config option can specify a directory containing files that each contain a CA certificate. Upon encountering a SSL validation error, the host handle is not created with a resulting error code of VIX_E_NET_HTTP_SSL_SECURITY. <li> The option VIX_HOSTOPTION_USE_EVENT_PUMP has been deprecated and may be removed from future versions of the VIX API. <li> With VMware vCenter Server and ESX/ESXi 4.0 hosts, an existing VI API session can be used instead of the username/password pair to authenticate when connecting. To use an existing VI API session, a VI "clone ticket" is required; call the VI API AcquireCloneTicket() method of the SessionManager object to get this ticket. Using the ticket string returned by this method, call VixHost_Connect() with NULL as the 'username' and the ticket as the 'password'. </ul> <h1>Side Effects</h1> None. <h1>Requirements</h1> vix.h, since VMware Server 1.0 <h1>Example</h1> <pre> #include "vix.h" int main(int argc, char * argv[]) { VixHandle hostHandle = VIX_INVALID_HANDLE; VixHandle jobHandle = VIX_INVALID_HANDLE; VixError err; // Connect as current user on local host. jobHandle = VixHost_Connect(VIX_API_VERSION, VIX_SERVICEPROVIDER_VMWARE_VI_SERVER, "https://viserver.example.com/sdk", // hostName 0, // hostPort "Administrator", // userName "adminpass", // password, 0, // options VIX_INVALID_HANDLE, // propertyListHandle NULL, // callbackProc NULL); // clientData err = VixJob_Wait(jobHandle, VIX_PROPERTY_JOB_RESULT_HANDLE, &hostHandle, VIX_PROPERTY_NONE); if (VIX_OK != err) { // Handle the error... goto abort; } Vix_ReleaseHandle(jobHandle); // Other code goes here... abort: Vix_ReleaseHandle(jobHandle); VixHost_Disconnect(hostHandle); } </pre> </body> </html> <hr>Copyright (C) 2007-2013 VMware, Inc. 
All rights reserved.
dreadl0ck/govix
vendor/libvix/vix113_reference/lang/c/functions/VixHost_Connect.html
HTML
mpl-2.0
8,292
<!DOCTYPE html>
<html class="reftest-wait">
  <head>
    <style type="text/css">
      #block1 {
        height: 20px;
        margin-bottom: 10px;
        background-color: green;
      }
      #margin-only {
        margin-top: 20px;
        margin-bottom: 30px;
      }
      #block2 {
        height: 20px;
        background-color: green;
      }
    </style>
    <script type="text/javascript">
      function test() {
        document.getElementById('block2').style.marginTop = '40px';
        document.documentElement.removeAttribute('class');
      }
      document.addEventListener('MozReftestInvalidate', test);
    </script>
  </head>
  <body>
    <div id="block1"></div>
    <div id="margin-only"></div>
    <div id="block2"></div>
  </body>
</html>
Yukarumya/Yukarum-Redfoxes
layout/reftests/margin-collapsing/block-no-content-2a-dyn.html
HTML
mpl-2.0
610
/* * This program is part of the OpenLMIS logistics management information system platform software. * Copyright © 2013 VillageReach * * This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. *   * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Affero General Public License for more details. * You should have received a copy of the GNU Affero General Public License along with this program.  If not, see http://www.gnu.org/licenses.  For additional information contact info@OpenLMIS.org.  */ package org.openlmis.UiUtils; import com.gargoylesoftware.htmlunit.BrowserVersion; import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeDriver; import org.openqa.selenium.firefox.FirefoxBinary; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.firefox.FirefoxProfile; import org.openqa.selenium.htmlunit.HtmlUnitDriver; import org.openqa.selenium.ie.InternetExplorerDriver; import org.openqa.selenium.remote.DesiredCapabilities; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Hashtable; import java.util.Map; import static java.lang.System.getProperty; import static java.lang.System.setProperty; public class DriverFactory { private String driverType; private String INPUT_ZIP_FILE_IE_DRIVER = null; private String INPUT_ZIP_FILE_CHROME_DRIVER = null; private String CHROME_FOLDER = null; private String OUTPUT_FOLDER = null; Unzip unZip; public WebDriver loadDriver(String browser) throws IOException, InterruptedException { String Separator = getProperty("file.separator"); File parentDir = new File(getProperty("user.dir")); CHROME_FOLDER = parentDir.getPath() + Separator + "test-modules" + Separator + "test-core" + Separator + "src" + Separator + "main" + Separator + "java" + Separator + "org" + Separator + "openlmis" + Separator + "UiUtils" + Separator; OUTPUT_FOLDER = parentDir.getPath() + Separator + "test-modules" + Separator + "test-core" + Separator + "src" + Separator + "main" + Separator + "java" + Separator + "org" + Separator + "openlmis" + Separator + "UiUtils" + Separator; INPUT_ZIP_FILE_IE_DRIVER = OUTPUT_FOLDER + "IEDriverServer_Win32_2.39.0.zip"; INPUT_ZIP_FILE_CHROME_DRIVER = OUTPUT_FOLDER + "chromedriver_win32.zip"; return loadDriver(true, browser); } public String driverType() { return driverType.trim(); } public void deleteExe() { unZip = new Unzip(); unZip.deleteFile(OUTPUT_FOLDER + "IEDriverServer.exe"); unZip.deleteFile(OUTPUT_FOLDER + "chromedriver.exe"); } private WebDriver loadDriver(boolean enableJavascript, String browser) throws InterruptedException, IOException { switch (browser) { case "firefox": driverType = getProperty("web.driver", "Firefox"); return createFirefoxDriver(enableJavascript); case "ie": unZip = new Unzip(); unZip.unZipIt(INPUT_ZIP_FILE_IE_DRIVER, OUTPUT_FOLDER); Thread.sleep(1500); driverType = setProperty("webdriver.ie.driver", OUTPUT_FOLDER + "IEDriverServer.exe"); driverType = getProperty("webdriver.ie.driver"); return createInternetExplorerDriver(); case "chrome": unZip = new Unzip(); unZip.unZipIt(INPUT_ZIP_FILE_CHROME_DRIVER, OUTPUT_FOLDER); Thread.sleep(1500); driverType = setProperty("webdriver.chrome.driver", OUTPUT_FOLDER + 
"chromedriver"); driverType = getProperty("webdriver.chrome.driver"); return createChromeDriver(); case "chromeM": // To run offline test on Jenkins change CHROME_FOLDER to OUTPUT_FOLDER driverType = setProperty("webdriver.chrome.driver", CHROME_FOLDER + "chromedriver"); driverType = getProperty("webdriver.chrome.driver"); return createChromeDriver(); case "HTMLUnit": return new HtmlUnitDriver(BrowserVersion.INTERNET_EXPLORER_8); default: driverType = getProperty("web.driver", "Firefox"); return createFirefoxDriver(enableJavascript); } } private WebDriver createFirefoxDriver(boolean enableJavascript) { boolean headless = Boolean.parseBoolean(getProperty("headless", "false")); FirefoxProfile profile = new FirefoxProfile(); profile.setAcceptUntrustedCertificates(true); profile.setPreference("signed.applets.codebase_principal_support", true); profile.setPreference("javascript.enabled", enableJavascript); profile.setPreference("browser.helperApps.neverAsk.saveToDisk", "text/csv"); profile.setPreference("browser.download.dir", new File(System.getProperty("user.dir")).getParent()); profile.setPreference("browser.download.folderList", 2); profile.setPreference("dom.storage.enabled", true); profile.setPreference("device.storage.enabled", true); if ((getProperty("os.name").toLowerCase().contains("mac")) && headless) { String LOCAL_FIREFOX_X11_PATH = "/opt/local/bin/firefox-x11"; File binaryFile = new File(LOCAL_FIREFOX_X11_PATH); FirefoxBinary binary = new FirefoxBinary(binaryFile); String LOCAL_X11_DISPLAY = ":5"; binary.setEnvironmentProperty("DISPLAY", LOCAL_X11_DISPLAY); return new FirefoxDriver(binary, profile); } return new FirefoxDriver(profile); } private WebDriver createInternetExplorerDriver() throws IOException { Runtime.getRuntime().exec("RunDll32.exe InetCpl.cpl,ClearMyTracksByProcess 255"); DesiredCapabilities ieCapabilities = DesiredCapabilities.internetExplorer(); ieCapabilities.setCapability("ignoreZoomSetting", true); return new InternetExplorerDriver(ieCapabilities); } private WebDriver createChromeDriver() { DesiredCapabilities capabilities = DesiredCapabilities.chrome(); Map<String, String> prefs = new Hashtable<>(); prefs.put("download.prompt_for_download", "false"); prefs.put("download.default_directory", "C:\\Users\\openlmis\\Downloads"); String[] switches = {"--start-maximized", "--ignore-certificate-errors"}; capabilities.setJavascriptEnabled(true); capabilities.setCapability("chrome.prefs", prefs); capabilities.setCapability("chrome.switches", Arrays.asList(switches)); return new ChromeDriver(capabilities); } }
kelvinmbwilo/vims
test-modules/test-core/src/main/java/org/openlmis/UiUtils/DriverFactory.java
Java
agpl-3.0
6,593
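A rough usage sketch for the DriverFactory above; the wrapper class name and URL are placeholders, and only the factory calls (loadDriver, driverType, deleteExe) and the standard Selenium WebDriver methods are taken from the file above or known APIs:

import java.io.IOException;

import org.openlmis.UiUtils.DriverFactory;
import org.openqa.selenium.WebDriver;

// Illustrative only; not part of the OpenLMIS test suite.
public class DriverFactorySketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        DriverFactory factory = new DriverFactory();

        // "firefox" avoids the zip-extraction branches used for the IE and Chrome drivers.
        WebDriver driver = factory.loadDriver("firefox");
        try {
            driver.get("http://localhost:9091");  // placeholder URL
            System.out.println("Loaded driver of type: " + factory.driverType());
        } finally {
            driver.quit();
            factory.deleteExe();  // cleans up any unpacked driver executables
        }
    }
}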
from datetime import timedelta
import json

from django.utils import timezone
import factory
from factory.django import DjangoModelFactory
from opaque_keys.edx.locator import CourseLocator

from ..models import CourseOverview


class CourseOverviewFactory(DjangoModelFactory):
    class Meta(object):
        model = CourseOverview
        django_get_or_create = ('id', )
        exclude = ('run', )

    version = CourseOverview.VERSION
    pre_requisite_courses = []
    org = 'edX'
    run = factory.Sequence('2012_Fall_{}'.format)

    @factory.lazy_attribute
    def _pre_requisite_courses_json(self):
        return json.dumps(self.pre_requisite_courses)

    @factory.lazy_attribute
    def _location(self):
        return self.id.make_usage_key('course', 'course')

    @factory.lazy_attribute
    def id(self):
        return CourseLocator(self.org, 'toy', self.run)

    @factory.lazy_attribute
    def display_name(self):
        return "{} Course".format(self.id)

    @factory.lazy_attribute
    def start(self):
        return timezone.now()

    @factory.lazy_attribute
    def end(self):
        return timezone.now() + timedelta(30)
cpennington/edx-platform
openedx/core/djangoapps/content/course_overviews/tests/factories.py
Python
agpl-3.0
1,152
/* * Generated by asn1c-0.9.24 (http://lionet.info/asn1c) * From ASN.1 module "EUTRA-RRC-Definitions" * found in "36331-c10.asn" * `asn1c -S /usr/local/share/asn1c -fcompound-names -fskeletons-copy -gen-PER` */ #include "SupportedBandUTRA-FDD.h" int SupportedBandUTRA_FDD_constraint(asn_TYPE_descriptor_t *td, const void *sptr, asn_app_constraint_failed_f *ctfailcb, void *app_key) { /* Replace with underlying type checker */ td->check_constraints = asn_DEF_NativeEnumerated.check_constraints; return td->check_constraints(td, sptr, ctfailcb, app_key); } /* * This type is implemented using NativeEnumerated, * so here we adjust the DEF accordingly. */ static void SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(asn_TYPE_descriptor_t *td) { td->free_struct = asn_DEF_NativeEnumerated.free_struct; td->print_struct = asn_DEF_NativeEnumerated.print_struct; td->check_constraints = asn_DEF_NativeEnumerated.check_constraints; td->ber_decoder = asn_DEF_NativeEnumerated.ber_decoder; td->der_encoder = asn_DEF_NativeEnumerated.der_encoder; td->xer_decoder = asn_DEF_NativeEnumerated.xer_decoder; td->xer_encoder = asn_DEF_NativeEnumerated.xer_encoder; td->uper_decoder = asn_DEF_NativeEnumerated.uper_decoder; td->uper_encoder = asn_DEF_NativeEnumerated.uper_encoder; if(!td->per_constraints) td->per_constraints = asn_DEF_NativeEnumerated.per_constraints; td->elements = asn_DEF_NativeEnumerated.elements; td->elements_count = asn_DEF_NativeEnumerated.elements_count; /* td->specifics = asn_DEF_NativeEnumerated.specifics; // Defined explicitly */ } void SupportedBandUTRA_FDD_free(asn_TYPE_descriptor_t *td, void *struct_ptr, int contents_only) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); td->free_struct(td, struct_ptr, contents_only); } int SupportedBandUTRA_FDD_print(asn_TYPE_descriptor_t *td, const void *struct_ptr, int ilevel, asn_app_consume_bytes_f *cb, void *app_key) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); return td->print_struct(td, struct_ptr, ilevel, cb, app_key); } asn_dec_rval_t SupportedBandUTRA_FDD_decode_ber(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td, void **structure, const void *bufptr, size_t size, int tag_mode) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); return td->ber_decoder(opt_codec_ctx, td, structure, bufptr, size, tag_mode); } asn_enc_rval_t SupportedBandUTRA_FDD_encode_der(asn_TYPE_descriptor_t *td, void *structure, int tag_mode, ber_tlv_tag_t tag, asn_app_consume_bytes_f *cb, void *app_key) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); return td->der_encoder(td, structure, tag_mode, tag, cb, app_key); } asn_dec_rval_t SupportedBandUTRA_FDD_decode_xer(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td, void **structure, const char *opt_mname, const void *bufptr, size_t size) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); return td->xer_decoder(opt_codec_ctx, td, structure, opt_mname, bufptr, size); } asn_enc_rval_t SupportedBandUTRA_FDD_encode_xer(asn_TYPE_descriptor_t *td, void *structure, int ilevel, enum xer_encoder_flags_e flags, asn_app_consume_bytes_f *cb, void *app_key) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); return td->xer_encoder(td, structure, ilevel, flags, cb, app_key); } asn_dec_rval_t SupportedBandUTRA_FDD_decode_uper(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td, asn_per_constraints_t *constraints, void **structure, asn_per_data_t *per_data) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); return td->uper_decoder(opt_codec_ctx, td, constraints, structure, per_data); } 
asn_enc_rval_t SupportedBandUTRA_FDD_encode_uper(asn_TYPE_descriptor_t *td, asn_per_constraints_t *constraints, void *structure, asn_per_outp_t *per_out) { SupportedBandUTRA_FDD_1_inherit_TYPE_descriptor(td); return td->uper_encoder(td, constraints, structure, per_out); } static asn_per_constraints_t asn_PER_type_SupportedBandUTRA_FDD_constr_1 GCC_NOTUSED = { { APC_CONSTRAINED | APC_EXTENSIBLE, 4, 4, 0, 15 } /* (0..15,...) */, { APC_UNCONSTRAINED, -1, -1, 0, 0 }, 0, 0 /* No PER value map */ }; static asn_INTEGER_enum_map_t asn_MAP_SupportedBandUTRA_FDD_value2enum_1[] = { { 0, 5, "bandI" }, { 1, 6, "bandII" }, { 2, 7, "bandIII" }, { 3, 6, "bandIV" }, { 4, 5, "bandV" }, { 5, 6, "bandVI" }, { 6, 7, "bandVII" }, { 7, 8, "bandVIII" }, { 8, 6, "bandIX" }, { 9, 5, "bandX" }, { 10, 6, "bandXI" }, { 11, 7, "bandXII" }, { 12, 8, "bandXIII" }, { 13, 7, "bandXIV" }, { 14, 6, "bandXV" }, { 15, 7, "bandXVI" }, { 16, 12, "bandXVII-8a0" }, { 17, 13, "bandXVIII-8a0" }, { 18, 11, "bandXIX-8a0" }, { 19, 10, "bandXX-8a0" }, { 20, 11, "bandXXI-8a0" }, { 21, 12, "bandXXII-8a0" }, { 22, 13, "bandXXIII-8a0" }, { 23, 12, "bandXXIV-8a0" }, { 24, 11, "bandXXV-8a0" }, { 25, 12, "bandXXVI-8a0" }, { 26, 13, "bandXXVII-8a0" }, { 27, 14, "bandXXVIII-8a0" }, { 28, 12, "bandXXIX-8a0" }, { 29, 11, "bandXXX-8a0" }, { 30, 12, "bandXXXI-8a0" }, { 31, 13, "bandXXXII-8a0" } /* This list is extensible */ }; static unsigned int asn_MAP_SupportedBandUTRA_FDD_enum2value_1[] = { 0, /* bandI(0) */ 1, /* bandII(1) */ 2, /* bandIII(2) */ 3, /* bandIV(3) */ 8, /* bandIX(8) */ 4, /* bandV(4) */ 5, /* bandVI(5) */ 6, /* bandVII(6) */ 7, /* bandVIII(7) */ 9, /* bandX(9) */ 10, /* bandXI(10) */ 11, /* bandXII(11) */ 12, /* bandXIII(12) */ 13, /* bandXIV(13) */ 18, /* bandXIX-8a0(18) */ 14, /* bandXV(14) */ 15, /* bandXVI(15) */ 16, /* bandXVII-8a0(16) */ 17, /* bandXVIII-8a0(17) */ 19, /* bandXX-8a0(19) */ 20, /* bandXXI-8a0(20) */ 21, /* bandXXII-8a0(21) */ 22, /* bandXXIII-8a0(22) */ 23, /* bandXXIV-8a0(23) */ 28, /* bandXXIX-8a0(28) */ 24, /* bandXXV-8a0(24) */ 25, /* bandXXVI-8a0(25) */ 26, /* bandXXVII-8a0(26) */ 27, /* bandXXVIII-8a0(27) */ 29, /* bandXXX-8a0(29) */ 30, /* bandXXXI-8a0(30) */ 31 /* bandXXXII-8a0(31) */ /* This list is extensible */ }; static asn_INTEGER_specifics_t asn_SPC_SupportedBandUTRA_FDD_specs_1 = { asn_MAP_SupportedBandUTRA_FDD_value2enum_1, /* "tag" => N; sorted by tag */ asn_MAP_SupportedBandUTRA_FDD_enum2value_1, /* N => "tag"; sorted by N */ 32, /* Number of elements in the maps */ 17, /* Extensions before this member */ 1, /* Strict enumeration */ 0, /* Native long size */ 0 }; static ber_tlv_tag_t asn_DEF_SupportedBandUTRA_FDD_tags_1[] = { (ASN_TAG_CLASS_UNIVERSAL | (10 << 2)) }; asn_TYPE_descriptor_t asn_DEF_SupportedBandUTRA_FDD = { "SupportedBandUTRA-FDD", "SupportedBandUTRA-FDD", SupportedBandUTRA_FDD_free, SupportedBandUTRA_FDD_print, SupportedBandUTRA_FDD_constraint, SupportedBandUTRA_FDD_decode_ber, SupportedBandUTRA_FDD_encode_der, SupportedBandUTRA_FDD_decode_xer, SupportedBandUTRA_FDD_encode_xer, SupportedBandUTRA_FDD_decode_uper, SupportedBandUTRA_FDD_encode_uper, 0, /* Use generic outmost tag fetcher */ asn_DEF_SupportedBandUTRA_FDD_tags_1, sizeof(asn_DEF_SupportedBandUTRA_FDD_tags_1) /sizeof(asn_DEF_SupportedBandUTRA_FDD_tags_1[0]), /* 1 */ asn_DEF_SupportedBandUTRA_FDD_tags_1, /* Same as above */ sizeof(asn_DEF_SupportedBandUTRA_FDD_tags_1) /sizeof(asn_DEF_SupportedBandUTRA_FDD_tags_1[0]), /* 1 */ &asn_PER_type_SupportedBandUTRA_FDD_constr_1, 0, 0, /* Defined elsewhere */ 
&asn_SPC_SupportedBandUTRA_FDD_specs_1 /* Additional specs */ };
yangchengwork/LTE-Cell-Scanner
asn1_test/LTE-BCCH-DL-SCH-decode/SupportedBandUTRA-FDD.c
C
agpl-3.0
7,375
# -*- coding: utf-8 -*- from __future__ import unicode_literals, absolute_import import six from sys import platform import locale import os.path from pelican.tests.support import unittest, get_settings from pelican.contents import Page, Article, Static, URLWrapper, Author, Category from pelican.settings import DEFAULT_CONFIG from pelican.utils import path_to_url, truncate_html_words, SafeDatetime, posix_join from pelican.signals import content_object_init from jinja2.utils import generate_lorem_ipsum # generate one paragraph, enclosed with <p> TEST_CONTENT = str(generate_lorem_ipsum(n=1)) TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False) class TestPage(unittest.TestCase): def setUp(self): super(TestPage, self).setUp() self.old_locale = locale.setlocale(locale.LC_ALL) locale.setlocale(locale.LC_ALL, str('C')) self.page_kwargs = { 'content': TEST_CONTENT, 'context': { 'localsiteurl': '', }, 'metadata': { 'summary': TEST_SUMMARY, 'title': 'foo bar', 'author': Author('Blogger', DEFAULT_CONFIG), }, 'source_path': '/path/to/file/foo.ext' } def tearDown(self): locale.setlocale(locale.LC_ALL, self.old_locale) def test_use_args(self): # Creating a page with arguments passed to the constructor should use # them to initialise object's attributes. metadata = {'foo': 'bar', 'foobar': 'baz', 'title': 'foobar', } page = Page(TEST_CONTENT, metadata=metadata, context={'localsiteurl': ''}) for key, value in metadata.items(): self.assertTrue(hasattr(page, key)) self.assertEqual(value, getattr(page, key)) self.assertEqual(page.content, TEST_CONTENT) def test_mandatory_properties(self): # If the title is not set, must throw an exception. page = Page('content') with self.assertRaises(NameError): page.check_properties() page = Page('content', metadata={'title': 'foobar'}) page.check_properties() def test_summary_from_metadata(self): # If a :summary: metadata is given, it should be used page = Page(**self.page_kwargs) self.assertEqual(page.summary, TEST_SUMMARY) def test_summary_max_length(self): # If a :SUMMARY_MAX_LENGTH: is set, and there is no other summary, # generated summary should not exceed the given length. page_kwargs = self._copy_page_kwargs() settings = get_settings() page_kwargs['settings'] = settings del page_kwargs['metadata']['summary'] settings['SUMMARY_MAX_LENGTH'] = None page = Page(**page_kwargs) self.assertEqual(page.summary, TEST_CONTENT) settings['SUMMARY_MAX_LENGTH'] = 10 page = Page(**page_kwargs) self.assertEqual(page.summary, truncate_html_words(TEST_CONTENT, 10)) settings['SUMMARY_MAX_LENGTH'] = 0 page = Page(**page_kwargs) self.assertEqual(page.summary, '') def test_slug(self): page_kwargs = self._copy_page_kwargs() settings = get_settings() page_kwargs['settings'] = settings settings['SLUGIFY_SOURCE'] = "title" page = Page(**page_kwargs) self.assertEqual(page.slug, 'foo-bar') settings['SLUGIFY_SOURCE'] = "basename" page = Page(**page_kwargs) self.assertEqual(page.slug, 'foo') def test_defaultlang(self): # If no lang is given, default to the default one. page = Page(**self.page_kwargs) self.assertEqual(page.lang, DEFAULT_CONFIG['DEFAULT_LANG']) # it is possible to specify the lang in the metadata infos self.page_kwargs['metadata'].update({'lang': 'fr', }) page = Page(**self.page_kwargs) self.assertEqual(page.lang, 'fr') def test_save_as(self): # If a lang is not the default lang, save_as should be set # accordingly. 
# if a title is defined, save_as should be set page = Page(**self.page_kwargs) self.assertEqual(page.save_as, "pages/foo-bar.html") # if a language is defined, save_as should include it accordingly self.page_kwargs['metadata'].update({'lang': 'fr', }) page = Page(**self.page_kwargs) self.assertEqual(page.save_as, "pages/foo-bar-fr.html") def test_metadata_url_format(self): # Arbitrary metadata should be passed through url_format() page = Page(**self.page_kwargs) self.assertIn('summary', page.url_format.keys()) page.metadata['directory'] = 'test-dir' page.settings = get_settings(PAGE_SAVE_AS='{directory}/{slug}') self.assertEqual(page.save_as, 'test-dir/foo-bar') def test_datetime(self): # If DATETIME is set to a tuple, it should be used to override LOCALE dt = SafeDatetime(2015, 9, 13) page_kwargs = self._copy_page_kwargs() # set its date to dt page_kwargs['metadata']['date'] = dt page = Page(**page_kwargs) # page.locale_date is a unicode string in both python2 and python3 dt_date = dt.strftime(DEFAULT_CONFIG['DEFAULT_DATE_FORMAT']) # dt_date is a byte string in python2, and a unicode string in python3 # Let's make sure it is a unicode string (relies on python 3.3 supporting the u prefix) if type(dt_date) != type(u''): # python2: dt_date = unicode(dt_date, 'utf8') self.assertEqual(page.locale_date, dt_date ) page_kwargs['settings'] = get_settings() # I doubt this can work on all platforms ... if platform == "win32": locale = 'jpn' else: locale = 'ja_JP.utf8' page_kwargs['settings']['DATE_FORMATS'] = {'jp': (locale, '%Y-%m-%d(%a)')} page_kwargs['metadata']['lang'] = 'jp' import locale as locale_module try: page = Page(**page_kwargs) self.assertEqual(page.locale_date, '2015-09-13(\u65e5)') except locale_module.Error: # The constructor of ``Page`` will try to set the locale to # ``ja_JP.utf8``. But this attempt will failed when there is no # such locale in the system. You can see which locales there are # in your system with ``locale -a`` command. # # Until we find some other method to test this functionality, we # will simply skip this test. unittest.skip("There is no locale %s in this system." % locale) def test_template(self): # Pages default to page, metadata overwrites default_page = Page(**self.page_kwargs) self.assertEqual('page', default_page.template) page_kwargs = self._copy_page_kwargs() page_kwargs['metadata']['template'] = 'custom' custom_page = Page(**page_kwargs) self.assertEqual('custom', custom_page.template) def _copy_page_kwargs(self): # make a deep copy of page_kwargs page_kwargs = dict([(key, self.page_kwargs[key]) for key in self.page_kwargs]) for key in page_kwargs: if not isinstance(page_kwargs[key], dict): break page_kwargs[key] = dict([(subkey, page_kwargs[key][subkey]) for subkey in page_kwargs[key]]) return page_kwargs def test_signal(self): # If a title is given, it should be used to generate the slug. def receiver_test_function(sender, instance): pass content_object_init.connect(receiver_test_function, sender=Page) Page(**self.page_kwargs) self.assertTrue(content_object_init.has_receivers_for(Page)) def test_get_content(self): # Test that the content is updated with the relative links to # filenames, tags and categories. 
settings = get_settings() args = self.page_kwargs.copy() args['settings'] = settings # Tag args['content'] = ('A simple test, with a ' '<a href="|tag|tagname">link</a>') page = Page(**args) content = page.get_content('http://notmyidea.org') self.assertEqual( content, ('A simple test, with a ' '<a href="http://notmyidea.org/tag/tagname.html">link</a>')) # Category args['content'] = ('A simple test, with a ' '<a href="|category|category">link</a>') page = Page(**args) content = page.get_content('http://notmyidea.org') self.assertEqual( content, ('A simple test, with a ' '<a href="http://notmyidea.org/category/category.html">link</a>')) def test_intrasite_link(self): # type does not take unicode in PY2 and bytes in PY3, which in # combination with unicode literals leads to following insane line: cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle' article = type(cls_name, (object,), {'url': 'article.html'}) args = self.page_kwargs.copy() args['settings'] = get_settings() args['source_path'] = 'content' args['context']['filenames'] = {'article.rst': article} # Classic intrasite link via filename args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html">link</a>' ) # fragment args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst#section-2">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html#section-2">link</a>' ) # query args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst' '?utm_whatever=234&highlight=word">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html' '?utm_whatever=234&highlight=word">link</a>' ) # combination args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst' '?utm_whatever=234&highlight=word#section-2">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html' '?utm_whatever=234&highlight=word#section-2">link</a>' ) def test_intrasite_link_more(self): # type does not take unicode in PY2 and bytes in PY3, which in # combination with unicode literals leads to following insane line: cls_name = '_DummyAsset' if six.PY3 else b'_DummyAsset' args = self.page_kwargs.copy() args['settings'] = get_settings() args['source_path'] = 'content' args['context']['filenames'] = { 'images/poster.jpg': type(cls_name, (object,), {'url': 'images/poster.jpg'}), 'assets/video.mp4': type(cls_name, (object,), {'url': 'assets/video.mp4'}), 'images/graph.svg': type(cls_name, (object,), {'url': 'images/graph.svg'}), 'reference.rst': type(cls_name, (object,), {'url': 'reference.html'}), } # video.poster args['content'] = ( 'There is a video with poster ' '<video controls poster="{filename}/images/poster.jpg">' '<source src="|filename|/assets/video.mp4" type="video/mp4">' '</video>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'There is a video with poster ' '<video controls poster="http://notmyidea.org/images/poster.jpg">' '<source src="http://notmyidea.org/assets/video.mp4" type="video/mp4">' '</video>' ) # object.data args['content'] = ( 'There is a svg object ' '<object 
data="{filename}/images/graph.svg" type="image/svg+xml"></object>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'There is a svg object ' '<object data="http://notmyidea.org/images/graph.svg" type="image/svg+xml"></object>' ) # blockquote.cite args['content'] = ( 'There is a blockquote with cite attribute ' '<blockquote cite="{filename}reference.rst">blah blah</blockquote>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'There is a blockquote with cite attribute ' '<blockquote cite="http://notmyidea.org/reference.html">blah blah</blockquote>' ) def test_intrasite_link_markdown_spaces(self): # Markdown introduces %20 instead of spaces, this tests that # we support markdown doing this. cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle' article = type(cls_name, (object,), {'url': 'article-spaces.html'}) args = self.page_kwargs.copy() args['settings'] = get_settings() args['source_path'] = 'content' args['context']['filenames'] = {'article spaces.rst': article} # An intrasite link via filename with %20 as a space args['content'] = ( 'A simple test, with a ' '<a href="|filename|article%20spaces.rst">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article-spaces.html">link</a>' ) def test_multiple_authors(self): """Test article with multiple authors.""" args = self.page_kwargs.copy() content = Page(**args) assert content.authors == [content.author] args['metadata'].pop('author') args['metadata']['authors'] = [Author('First Author', DEFAULT_CONFIG), Author('Second Author', DEFAULT_CONFIG)] content = Page(**args) assert content.authors assert content.author == content.authors[0] class TestArticle(TestPage): def test_template(self): # Articles default to article, metadata overwrites default_article = Article(**self.page_kwargs) self.assertEqual('article', default_article.template) article_kwargs = self._copy_page_kwargs() article_kwargs['metadata']['template'] = 'custom' custom_article = Article(**article_kwargs) self.assertEqual('custom', custom_article.template) def test_slugify_category_author(self): settings = get_settings() settings['SLUG_SUBSTITUTIONS'] = [ ('C#', 'csharp') ] settings['ARTICLE_URL'] = '{author}/{category}/{slug}/' settings['ARTICLE_SAVE_AS'] = '{author}/{category}/{slug}/index.html' article_kwargs = self._copy_page_kwargs() article_kwargs['metadata']['author'] = Author("O'Brien", settings) article_kwargs['metadata']['category'] = Category('C# & stuff', settings) article_kwargs['metadata']['title'] = 'fnord' article_kwargs['settings'] = settings article = Article(**article_kwargs) self.assertEqual(article.url, 'obrien/csharp-stuff/fnord/') self.assertEqual(article.save_as, 'obrien/csharp-stuff/fnord/index.html') class TestStatic(unittest.TestCase): def setUp(self): self.settings = get_settings( STATIC_SAVE_AS='{path}', STATIC_URL='{path}', PAGE_SAVE_AS=os.path.join('outpages', '{slug}.html'), PAGE_URL='outpages/{slug}.html') self.context = self.settings.copy() self.static = Static(content=None, metadata={}, settings=self.settings, source_path=posix_join('dir', 'foo.jpg'), context=self.context) self.context['filenames'] = {self.static.source_path: self.static} def tearDown(self): pass def test_attach_to_same_dir(self): """attach_to() overrides a static file's save_as and url. 
""" page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) expected_save_as = os.path.join('outpages', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_attach_to_parent_dir(self): """attach_to() preserves dirs inside the linking document dir. """ page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path='fakepage.md') self.static.attach_to(page) expected_save_as = os.path.join('outpages', 'dir', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_attach_to_other_dir(self): """attach_to() ignores dirs outside the linking document dir. """ page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md')) self.static.attach_to(page) expected_save_as = os.path.join('outpages', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_attach_to_ignores_subsequent_calls(self): """attach_to() does nothing when called a second time. """ page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) otherdir_settings = self.settings.copy() otherdir_settings.update(dict( PAGE_SAVE_AS=os.path.join('otherpages', '{slug}.html'), PAGE_URL='otherpages/{slug}.html')) otherdir_page = Page(content="other page", metadata={'title': 'otherpage'}, settings=otherdir_settings, source_path=os.path.join('dir', 'otherpage.md')) self.static.attach_to(otherdir_page) otherdir_save_as = os.path.join('otherpages', 'foo.jpg') self.assertNotEqual(self.static.save_as, otherdir_save_as) self.assertNotEqual(self.static.url, path_to_url(otherdir_save_as)) def test_attach_to_does_nothing_after_save_as_referenced(self): """attach_to() does nothing if the save_as was already referenced. (For example, by a {filename} link an a document processed earlier.) """ original_save_as = self.static.save_as page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) self.assertEqual(self.static.save_as, original_save_as) self.assertEqual(self.static.url, path_to_url(original_save_as)) def test_attach_to_does_nothing_after_url_referenced(self): """attach_to() does nothing if the url was already referenced. (For example, by a {filename} link an a document processed earlier.) """ original_url = self.static.url page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) self.assertEqual(self.static.save_as, self.static.source_path) self.assertEqual(self.static.url, original_url) def test_attach_to_does_not_override_an_override(self): """attach_to() does not override paths that were overridden elsewhere. 
(For example, by the user with EXTRA_PATH_METADATA) """ customstatic = Static(content=None, metadata=dict(save_as='customfoo.jpg', url='customfoo.jpg'), settings=self.settings, source_path=os.path.join('dir', 'foo.jpg'), context=self.settings.copy()) page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) customstatic.attach_to(page) self.assertEqual(customstatic.save_as, 'customfoo.jpg') self.assertEqual(customstatic.url, 'customfoo.jpg') def test_attach_link_syntax(self): """{attach} link syntax triggers output path override & url replacement. """ html = '<a href="{attach}../foo.jpg">link</a>' page = Page(content=html, metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md'), context=self.context) content = page.get_content('') self.assertNotEqual(content, html, "{attach} link syntax did not trigger URL replacement.") expected_save_as = os.path.join('outpages', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_tag_link_syntax(self): "{tag} link syntax triggers url replacement." html = '<a href="{tag}foo">link</a>' page = Page( content=html, metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md'), context=self.context) content = page.get_content('') self.assertNotEqual(content, html) def test_category_link_syntax(self): "{category} link syntax triggers url replacement." html = '<a href="{category}foo">link</a>' page = Page(content=html, metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md'), context=self.context) content = page.get_content('') self.assertNotEqual(content, html) class TestURLWrapper(unittest.TestCase): def test_comparisons(self): # URLWrappers are sorted by name wrapper_a = URLWrapper(name='first', settings={}) wrapper_b = URLWrapper(name='last', settings={}) self.assertFalse(wrapper_a > wrapper_b) self.assertFalse(wrapper_a >= wrapper_b) self.assertFalse(wrapper_a == wrapper_b) self.assertTrue(wrapper_a != wrapper_b) self.assertTrue(wrapper_a <= wrapper_b) self.assertTrue(wrapper_a < wrapper_b) wrapper_b.name = 'first' self.assertFalse(wrapper_a > wrapper_b) self.assertTrue(wrapper_a >= wrapper_b) self.assertTrue(wrapper_a == wrapper_b) self.assertFalse(wrapper_a != wrapper_b) self.assertTrue(wrapper_a <= wrapper_b) self.assertFalse(wrapper_a < wrapper_b) wrapper_a.name = 'last' self.assertTrue(wrapper_a > wrapper_b) self.assertTrue(wrapper_a >= wrapper_b) self.assertFalse(wrapper_a == wrapper_b) self.assertTrue(wrapper_a != wrapper_b) self.assertFalse(wrapper_a <= wrapper_b) self.assertFalse(wrapper_a < wrapper_b)
goerz/pelican
pelican/tests/test_contents.py
Python
agpl-3.0
24,186
/*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'horizontalrule', 'pl', {
	toolbar: 'Wstaw poziomą linię'
} );
musicEnfanthen/Knora
salsah1/src/public/vendor/ckeditor_4.7.0/plugins/horizontalrule/lang/pl.js
JavaScript
agpl-3.0
234
<?php /** * Shopware 5 * Copyright (c) shopware AG * * According to our dual licensing model, this program can be used either * under the terms of the GNU Affero General Public License, version 3, * or under a proprietary license. * * The texts of the GNU Affero General Public License with an additional * permission and of our proprietary license can be found at and * in the LICENSE file you have received along with this program. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * "Shopware" is a registered trademark of shopware AG. * The licensing of the program under the AGPLv3 does not imply a * trademark license. Therefore any rights, title and interest in * our trademarks remain entirely with us. */ use Shopware\Bundle\BenchmarkBundle\Service\TemplateCachingHandler; use Shopware\Models\Benchmark\BenchmarkConfig; use Shopware\Models\Benchmark\Repository as BenchmarkRepository; use Shopware\Models\Menu\Menu; class Shopware_Controllers_Backend_BenchmarkOverview extends Shopware_Controllers_Backend_ExtJs implements \Shopware\Components\CSRFWhitelistAware { /** * Returns a list with actions which should not be validated for CSRF protection * * @return string[] */ public function getWhitelistedCSRFActions() { return ['index', 'render', 'saveIndustry', 'getShops']; } public function indexAction() { $shopId = $this->getShopId(); /** @var BenchmarkRepository $benchmarkRepository */ $benchmarkRepository = $this->get('shopware.benchmark_bundle.repository.config'); $config = $benchmarkRepository->getConfigForShop($shopId); $this->handleSettings($config); } public function renderAction() { $this->get('plugins')->Controller()->ViewRenderer()->setNoRender(true); $this->Front()->Plugins()->Json()->setRenderer(false); /** @var BenchmarkRepository $benchmarkRepository */ $benchmarkRepository = $this->get('shopware.benchmark_bundle.repository.config'); $config = $benchmarkRepository->getConfigForShop($this->getShopId()); if ($this->hasOutdatedStatistics($config->getLastReceived())) { $this->redirect([ 'controller' => 'BenchmarkOverview', 'action' => 'index', 'shopId' => $this->getShopId(), ]); return; } echo $config->getCachedTemplate(); } public function saveIndustryAction() { $config = $this->request->getParam('config'); /** @var BenchmarkRepository $benchmarkRepository */ $benchmarkRepository = $this->get('shopware.benchmark_bundle.repository.config'); $benchmarkRepository->saveShopConfigs($config); $this->enableMenu(); $this->View()->assign('success', true); } public function getShopsAction() { /** @var BenchmarkRepository $benchmarkRepository */ $benchmarkRepository = $this->get('shopware.benchmark_bundle.repository.config'); $shops = $benchmarkRepository->getShopsWithValidTemplate(); $currentShop = $this->getShopId(); $shops[$currentShop]['active'] = 1; $widgetsAllowed = (int) $this->_isAllowed('swag-bi-base', 'widgets'); $this->View()->assign([ 'shops' => $shops, 'shopSwitchUrl' => $this->Front()->Router()->assemble([ 'controller' => 'BenchmarkOverview', 'action' => 'render', 'shopId' => 'replaceShopId', ]) . '?widgetAllowed=' . 
$widgetsAllowed, ]); } protected function initAcl() { $this->addAclPermission('index', 'read', 'Insufficient permissions'); $this->addAclPermission('render', 'read', 'Insufficient permissions'); $this->addAclPermission('setIndustry', 'manage', 'Insufficient permissions'); $this->addAclPermission('getShops', 'read', 'Insufficient permissions'); } private function handleSettings(BenchmarkConfig $config = null) { $backendLanguage = $this->get('auth')->getIdentity()->locale->getId() === 1 ? 'de' : 'en'; /** @var BenchmarkRepository $benchmarkRepository */ $benchmarkRepository = $this->get('shopware.benchmark_bundle.repository.config'); if (!$config || $benchmarkRepository->getConfigsCount() === 0) { $this->redirect([ 'controller' => 'BenchmarkLocalOverview', 'action' => 'render', 'template' => 'start', 'lang' => $this->request->getParam('lang', $backendLanguage), ]); return; } if ($this->hasFreshStatistics($config->getLastReceived())) { $this->loadCachedFile(); return; } if (!$config->isActive() || $this->hasOutdatedStatistics($config->getLastReceived())) { $this->redirect([ 'controller' => 'BenchmarkLocalOverview', 'action' => 'render', 'template' => 'waiting', 'lang' => $this->request->getParam('lang', $backendLanguage), ]); return; } $this->loadCachedFile(); } /** * Checks if "lastReceived" is younger than 24 hours. * * @return bool */ private function hasFreshStatistics(\DateTimeInterface $lastReceived) { $today = new \DateTime('now'); $interval = new \DateInterval('PT1H'); $periods = new \DatePeriod($lastReceived, $interval, $today); $hours = iterator_count($periods); return $hours < 24; } /** * Checks if "lastReceived" is older than 7 days. * * @return bool */ private function hasOutdatedStatistics(\DateTimeInterface $lastReceived) { $today = new \DateTime('now'); $interval = new \DateInterval('P1D'); $periods = new \DatePeriod($lastReceived, $interval, $today); $days = iterator_count($periods); return $days > 7; } private function loadCachedFile() { /** @var TemplateCachingHandler $cachingHandler */ $cachingHandler = $this->get('shopware.benchmark_bundle.components.template_caching_handler'); $shopId = $this->getShopId(); if ($cachingHandler->isTemplateCached($shopId)) { $link = $this->get('router')->assemble([ 'controller' => 'BenchmarkOverview', 'action' => 'render', 'shopId' => $shopId, ]); $widgetsAllowed = (int) $this->_isAllowed('swag-bi-base', 'widgets'); $this->redirect($link . '?widgetAllowed=' . $widgetsAllowed); return; } $this->redirect([ 'controller' => 'BenchmarkLocalOverview', 'action' => 'render', 'template' => 'waiting', 'lang' => $this->request->getParam('lang', 'de'), ]); } private function enableMenu() { $em = $this->get('models'); $repo = $em->getRepository(Menu::class); /** @var Menu|null $menuEntry */ $menuEntry = $repo->findOneBy(['controller' => 'Benchmark', 'action' => 'Settings']); if ($menuEntry) { $menuEntry->setActive(true); $em->persist($menuEntry); $em->flush(); } } /** * @return int */ private function getShopId() { $shopId = (int) $this->request->getParam('shopId'); if (!$shopId) { $shopId = $this->get('models')->getRepository(\Shopware\Models\Shop\Shop::class)->getActiveDefault()->getId(); } return $shopId; } }
wlwwt/shopware
engine/Shopware/Bundle/BenchmarkBundle/Controllers/Backend/BenchmarkOverview.php
PHP
agpl-3.0
7,933
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('schedules', '0005_auto_20171010_1722'),
    ]

    operations = [
        migrations.CreateModel(
            name='ScheduleExperience',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('experience_type', models.PositiveSmallIntegerField(default=0, choices=[(0, b'Recurring Nudge and Upgrade Reminder'), (1, b'Course Updates')])),
                ('schedule', models.OneToOneField(related_name='experience', to='schedules.Schedule')),
            ],
        ),
    ]
lduarte1991/edx-platform
openedx/core/djangoapps/schedules/migrations/0006_scheduleexperience.py
Python
agpl-3.0
750
/*
 * Generated by asn1c-0.9.24 (http://lionet.info/asn1c)
 * From ASN.1 module "EUTRA-RRC-Definitions"
 * 	found in "36331-c10.asn"
 * 	`asn1c -S /usr/local/share/asn1c -fcompound-names -fskeletons-copy -gen-PER`
 */

#ifndef	_ReestablishmentCause_H_
#define	_ReestablishmentCause_H_

#include <asn_application.h>

/* Including external dependencies */
#include <NativeEnumerated.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Dependencies */
typedef enum ReestablishmentCause {
	ReestablishmentCause_reconfigurationFailure	= 0,
	ReestablishmentCause_handoverFailure	= 1,
	ReestablishmentCause_otherFailure	= 2,
	ReestablishmentCause_spare1	= 3
} e_ReestablishmentCause;

/* ReestablishmentCause */
typedef long	 ReestablishmentCause_t;

/* Implementation */
extern asn_TYPE_descriptor_t asn_DEF_ReestablishmentCause;
asn_struct_free_f ReestablishmentCause_free;
asn_struct_print_f ReestablishmentCause_print;
asn_constr_check_f ReestablishmentCause_constraint;
ber_type_decoder_f ReestablishmentCause_decode_ber;
der_type_encoder_f ReestablishmentCause_encode_der;
xer_type_decoder_f ReestablishmentCause_decode_xer;
xer_type_encoder_f ReestablishmentCause_encode_xer;
per_type_decoder_f ReestablishmentCause_decode_uper;
per_type_encoder_f ReestablishmentCause_encode_uper;

#ifdef __cplusplus
}
#endif

#endif	/* _ReestablishmentCause_H_ */
#include <asn_internal.h>
yangchengwork/LTE-Cell-Scanner
asn1_test/LTE-BCCH-DL-SCH-decode/ReestablishmentCause.h
C
agpl-3.0
1,372
""" Tests for OAuth Dispatch python API module. """ import unittest from django.conf import settings from django.http import HttpRequest from django.test import TestCase from oauth2_provider.models import AccessToken from common.djangoapps.student.tests.factories import UserFactory OAUTH_PROVIDER_ENABLED = settings.FEATURES.get('ENABLE_OAUTH2_PROVIDER') if OAUTH_PROVIDER_ENABLED: from openedx.core.djangoapps.oauth_dispatch import api from openedx.core.djangoapps.oauth_dispatch.adapters import DOTAdapter from openedx.core.djangoapps.oauth_dispatch.tests.constants import DUMMY_REDIRECT_URL EXPECTED_DEFAULT_EXPIRES_IN = 36000 @unittest.skipUnless(OAUTH_PROVIDER_ENABLED, 'OAuth2 not enabled') class TestOAuthDispatchAPI(TestCase): """ Tests for oauth_dispatch's api.py module. """ def setUp(self): super().setUp() self.adapter = DOTAdapter() self.user = UserFactory() self.client = self.adapter.create_public_client( name='public app', user=self.user, redirect_uri=DUMMY_REDIRECT_URL, client_id='public-client-id', ) def _assert_stored_token(self, stored_token_value, expected_token_user, expected_client): stored_access_token = AccessToken.objects.get(token=stored_token_value) assert stored_access_token.user.id == expected_token_user.id assert stored_access_token.application.client_id == expected_client.client_id assert stored_access_token.application.user.id == expected_client.user.id def test_create_token_success(self): token = api.create_dot_access_token(HttpRequest(), self.user, self.client) assert token['access_token'] assert token['refresh_token'] self.assertDictContainsSubset( { 'token_type': 'Bearer', 'expires_in': EXPECTED_DEFAULT_EXPIRES_IN, 'scope': '', }, token, ) self._assert_stored_token(token['access_token'], self.user, self.client) def test_create_token_another_user(self): another_user = UserFactory() token = api.create_dot_access_token(HttpRequest(), another_user, self.client) self._assert_stored_token(token['access_token'], another_user, self.client) def test_create_token_overrides(self): expires_in = 4800 token = api.create_dot_access_token( HttpRequest(), self.user, self.client, expires_in=expires_in, scopes=['profile'], ) self.assertDictContainsSubset({'scope': 'profile'}, token) self.assertDictContainsSubset({'expires_in': expires_in}, token)
eduNEXT/edx-platform
openedx/core/djangoapps/oauth_dispatch/tests/test_api.py
Python
agpl-3.0
2,669
/*
 * Kuali Coeus, a comprehensive research administration system for higher education.
 *
 * Copyright 2005-2016 Kuali, Inc.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.kuali.kra.coi.disclosure;

import org.apache.commons.lang3.StringUtils;
import org.kuali.coeus.sys.framework.keyvalue.FormViewAwareUifKeyValuesFinderBase;
import org.kuali.coeus.sys.framework.service.KcServiceLocator;
import org.kuali.kra.coi.CoiDisclosureDocument;
import org.kuali.kra.coi.CoiDispositionStatus;
import org.kuali.rice.core.api.util.ConcreteKeyValue;
import org.kuali.rice.core.api.util.KeyValue;
import org.kuali.rice.krad.service.BusinessObjectService;
import org.kuali.rice.krad.util.GlobalVariables;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 *
 * This class is to serve as a values finder for coi disclosure actions - 'Approve'/'Disapprove'/'Set Disclosure Status'
 */
public class CoiDispositionStatusValuesFinder extends FormViewAwareUifKeyValuesFinderBase {

    private static final long serialVersionUID = -6465897852646872789L;

    private transient BusinessObjectService businessObjectService;

    @Override
    public List<KeyValue> getKeyValues() {
        CoiDisclosureDocument coiDisclosureDocument = (CoiDisclosureDocument) getDocument();
        String personId = coiDisclosureDocument.getCoiDisclosure().getDisclosureReporter().getPersonId();
        List<CoiDispositionStatus> statuses;
        if (StringUtils.equals(personId, GlobalVariables.getUserSession().getPrincipalId())
                && !coiDisclosureDocument.isViewOnly()
                && !coiDisclosureDocument.getDocumentHeader().getWorkflowDocument().isEnroute()
                && !coiDisclosureDocument.getDocumentHeader().getWorkflowDocument().isFinal()) {
            Map<String, Object> values = new HashMap<String, Object>();
            values.put("displayToReporter", true);
            statuses = (List<CoiDispositionStatus>) getBusinessObjectService().findMatchingOrderBy(CoiDispositionStatus.class, values, "coiDispositionCode", true);
        } else {
            statuses = (List<CoiDispositionStatus>) getBusinessObjectService().findAllOrderBy(CoiDispositionStatus.class, "coiDispositionCode", true);
        }
        List<KeyValue> keyValues = new ArrayList<KeyValue>();
        keyValues.add(new ConcreteKeyValue("", "select"));
        for (CoiDispositionStatus status : statuses) {
            keyValues.add(new ConcreteKeyValue(status.getCoiDispositionCode(), status.getDescription()));
        }
        return keyValues;
    }

    protected BusinessObjectService getBusinessObjectService() {
        if (businessObjectService == null) {
            businessObjectService = KcServiceLocator.getService(BusinessObjectService.class);
        }
        return businessObjectService;
    }

    public void setBusinessObjectService(BusinessObjectService businessObjectService) {
        this.businessObjectService = businessObjectService;
    }
}
UniversityOfHawaiiORS/kc
coeus-impl/src/main/java/org/kuali/kra/coi/disclosure/CoiDispositionStatusValuesFinder.java
Java
agpl-3.0
3,658
package com.gmail.nossr50.commands.party;

import org.bukkit.OfflinePlayer;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;

import com.gmail.nossr50.mcMMO;
import com.gmail.nossr50.datatypes.party.Party;
import com.gmail.nossr50.locale.LocaleLoader;
import com.gmail.nossr50.party.PartyManager;
import com.gmail.nossr50.util.commands.CommandUtils;
import com.gmail.nossr50.util.player.UserManager;

public class PartyChangeOwnerCommand implements CommandExecutor {
    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
        switch (args.length) {
            case 2:
                Party playerParty = UserManager.getPlayer((Player) sender).getParty();

                String targetName = CommandUtils.getMatchedPlayerName(args[1]);
                OfflinePlayer target = mcMMO.p.getServer().getOfflinePlayer(targetName);

                if (!playerParty.hasMember(target.getUniqueId())) {
                    sender.sendMessage(LocaleLoader.getString("Party.NotInYourParty", targetName));
                    return true;
                }

                PartyManager.setPartyLeader(target.getUniqueId(), playerParty);
                return true;

            default:
                sender.sendMessage(LocaleLoader.getString("Commands.Usage.2", "party", "owner", "<" + LocaleLoader.getString("Commands.Usage.Player") + ">"));
                return true;
        }
    }
}
EvilOlaf/mcMMO
src/main/java/com/gmail/nossr50/commands/party/PartyChangeOwnerCommand.java
Java
agpl-3.0
1,548
/* -*- Mode: C; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
   Copyright (C) 2010 Red Hat, Inc.

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
*/

#ifndef _H_ZLIB_DECODER
#define _H_ZLIB_DECODER

#include "common.h"
#include "red_canvas_base.h"

#ifndef __GNUC__
#define ZLIB_WINAPI
#endif
#include <zlib.h>

class ZlibDecoder : public SpiceZlibDecoder {
public:
    ZlibDecoder();
    ~ZlibDecoder();

    void decode(uint8_t *data, int data_size, uint8_t *dest, int dest_size);

private:
    z_stream _z_strm;
};

#endif
Open365/spice-web-client
misc/spice-0.12.0/client/zlib_decoder.h
C
agpl-3.0
1,150
<?php
/**
 * @author Vincent Petry <pvince81@owncloud.com>
 *
 * @copyright Copyright (c) 2017, ownCloud, Inc.
 * @license AGPL-3.0
 *
 * This code is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License, version 3,
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License, version 3,
 * along with this program. If not, see <http://www.gnu.org/licenses/>
 *
 */

namespace OC\Core\Command;

/**
 * Exception for when the user hit ctrl-c
 */
class InterruptedException extends \Exception {}
jacklicn/owncloud
core/Command/InterruptedException.php
PHP
agpl-3.0
880
/*
 *  /MathJax/jax/output/HTML-CSS/fonts/TeX/SansSerif/Regular/CombDiacritMarks.js
 *
 *  Copyright (c) 2009-2015 The MathJax Consortium
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

MathJax.Hub.Insert(
  MathJax.OutputJax["HTML-CSS"].FONTDATA.FONTS.MathJax_SansSerif,
  {
    768: [694, -527, 0, -417, -199],
    769: [694, -527, 0, -302, -84],
    770: [694, -527, 0, -422, -79],
    771: [677, -543, 0, -417, -84],
    772: [631, -552, 0, -431, -70],
    774: [694, -508, 0, -427, -74],
    775: [680, -576, 0, -302, -198],
    776: [680, -582, 0, -397, -104],
    778: [694, -527, 0, -319, -99],
    779: [694, -527, 0, -399, -84],
    780: [654, -487, 0, -422, -79]
  }
);

MathJax.Ajax.loadComplete(
  MathJax.OutputJax["HTML-CSS"].fontDir + "/SansSerif/Regular/CombDiacritMarks.js"
);
hannesk001/SPHERE-Framework
Library/MathJax/2.5.0/jax/output/HTML-CSS/fonts/TeX/SansSerif/Regular/CombDiacritMarks.js
JavaScript
agpl-3.0
1,310
--
-- This program is part of the OpenLMIS logistics management information system platform software.
-- Copyright © 2013 VillageReach
--
-- This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
-- You should have received a copy of the GNU Affero General Public License along with this program. If not, see http://www.gnu.org/licenses. For additional information contact info@OpenLMIS.org.
--

INSERT INTO losses_adjustments_types (name, description, additive, displayOrder) VALUES
('TRANSFER_IN', 'Transfer In', TRUE, 2),
('TRANSFER_OUT', 'Transfer Out', FALSE, 3),
('DAMAGED', 'Damaged', FALSE, 1),
('LOST', 'Lost', FALSE, 7),
('STOLEN', 'Stolen', FALSE, 8),
('EXPIRED', 'Expired', FALSE, 4),
('PASSED_OPEN_VIAL_TIME_LIMIT', 'Passed Open-Vial Time Limit', FALSE, 5),
('COLD_CHAIN_FAILURE', 'Cold Chain Failure', FALSE, 6),
('CLINIC_RETURN', 'Clinic Return', TRUE, 9);
kelvinmbwilo/vims
modules/db/src/main/resources/db/migration/V28_1__insert_losses_adjustments_types.sql
SQL
agpl-3.0
1,322
CI specific scripts
===================

Static tests
------------

The [static_tests.sh] script runs all static tests and presents their results
in a unified output. You can also run this script via `make static-test` in
the base directory.

To add a new static test, just write a `check.sh` script or call your static
checker directly and add it to the list of static tests in [static_tests.sh]:

```sh
ENV1=foobar ENV2=snafu run <your test command> --args
```

Github annotations
------------------

Using [github_annotate.sh] you can generate [Github annotations] for your
tests. You can generate both warnings and errors.

Before doing anything however, include [github_annotate.sh]
(assuming `${RIOTTOOLS}` is set to [dist/tools/])

```sh
. "${RIOTTOOLS}"/ci/github_annotate.sh
```

and set up the mechanism

```sh
github_annotate_setup
```

If your tests generate output, you can now pipe it to the file `${LOGFILE}` by
using `${LOG}`, e.g.

```sh
my_awesome_test | ${LOG}
```

Don't worry, your test will still function normally if you don't run it in a
Github Action!

Now you can use `github_annotate_error` and `github_annotate_warning` to
generate the actual errors and warnings. Both commands expect 3 parameters:

1. `FILENAME`: The name of the file the warning or error occurred in,
2. `LINENUM`: The number of the line the warning or error occurred in, and
3. `DETAILS`: Some descriptive details or the message of your warning or error.

You can parse those from `${LOGFILE}` (e.g. using tools such as `grep`, `sed`,
or `awk`) or generate them on the fly, if your script allows for that. E.g.

```sh
cat ${LOGFILE} | grep '^.*:[0-9]\+:' | while read error; do
    github_annotate_error \
        $(echo "${error}" | cut -d: -f1) \
        $(echo "${error}" | cut -d: -f2) \
        $(echo "${error}" | cut -d: -f3-)
done
```

If your output has the common output format `<filename>:<lineno>:<details>`
you can also use the function `github_annotate_parse_log_default`. It takes
the annotation function it should call on every line as an optional parameter,
with `github_annotate_error` being the default. E.g.

```sh
github_annotate_parse_log_default github_annotate_warning
```

does the same as the last example snippet, but uses `github_annotate_warning`
instead.

If you do not need to provide a file with your error or warning, you can also
use `github_annotate_error_no_file` or `github_annotate_warning_no_file`,
respectively. Both take just a message as a single parameter:

```sh
github_annotate_error_no_file "Something is wrong!"
```

After all errors or warnings are annotated, call `github_annotate_teardown` to
finish annotations.

**Note:** `github_annotate_report_last_run` is called within [static_tests.sh]
to attach the actual annotations to your PR. You don't need to call it from
within your test if you are adding that test to [static_tests.sh].

Checking if Fast CI Runs are Sufficient
---------------------------------------

The script `can_fast_ci_run.py` checks whether the change set a PR contains
justifies a full CI run, or whether only building certain apps or all apps for
certain boards is sufficient and which those are. The script will return with
exit code 0 if a fast CI run is sufficient and yield a JSON containing the
apps which need to be rebuilt (for all boards) and the list of boards for
which all apps need to be rebuilt.

### Usage

1. Pull the current upstream state into a branch (default: `master`)
2. Create a temporary branch that contains the PR either rebased on top of the
   upstream state or merged into the upstream state
3. Check out the branch containing the state of upstream + your PR
4. Run `./dist/tools/ci/can_fast_ci_run.py`

A minimal invocation sketch with placeholder branch names is shown at the end
of this document.

#### Options

- If the script is not launched in the RIOT repository root, provide a path to
  the repo root via the `--riotbase` parameter
- If the upstream state is not in `master`, the `--upstreambranch` parameter
  can be used to specify it (or a commit of the current upstream state)
- If the script opts for a full rebuild, passing `--explain` will make the
  script explain its reasoning
- To inspect the classification of changed files, the `--debug` switch will
  print it out

#### Gotchas

- If the script is not launched in a branch that contains all changes of the
  upstream branch, the diff set will be too large.
- The script relies on the presence of a `Makefile` to detect the path of
  modules. If changed files have no parent directory containing a `Makefile`
  (e.g. because a module was deleted), the classification will fail. This
  results in a full CI run, but this is the desired behavior anyway.
- Right now, any change in a module that is not a board, or in any package,
  will result in a full CI run. Maybe the KConfig migration will make it
  easier to efficiently get a full list of applications depending on any given
  module, so that fast CI runs can also be performed when modules and/or
  packages are changed.

[static_tests.sh]: ./static_tests.sh
[Github annotations]: https://github.blog/2018-12-14-introducing-check-runs-and-annotations/
[github_annotate.sh]: ./github_annotate.sh
[dist/tools/]: ../
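For orientation, here is a minimal invocation sketch of the usage steps above.
The remote and branch names (`upstream`, `my-pr-branch`, `pr-fastci-check`)
are placeholders; adjust them to your setup. Only the options documented above
(`--riotbase`, `--upstreambranch`, `--explain`, `--debug`) are used.

```sh
# Placeholder branch/remote names: bring the PR on top of the current upstream state.
git fetch upstream master
git checkout -b pr-fastci-check my-pr-branch
git rebase upstream/master            # or: git merge upstream/master

# Run from the repository root; point --upstreambranch at the upstream state.
./dist/tools/ci/can_fast_ci_run.py \
    --riotbase . \
    --upstreambranch upstream/master \
    --explain --debug

echo $?   # exit code 0 means a fast CI run is sufficient
```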
aabadie/RIOT
dist/tools/ci/README.md
Markdown
lgpl-2.1
5,154
// ---------------------------------------------------------------------
//
// Copyright (C) 2011 - 2015 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------


// check FETools::back_interpolate on parallel vector

#include "../tests.h"
#include <deal.II/base/utilities.h>
#include <deal.II/base/index_set.h>
#include <deal.II/lac/parallel_vector.h>
#include <deal.II/lac/constraint_matrix.h>
#include <deal.II/distributed/tria.h>
#include <deal.II/grid/grid_generator.h>
#include <deal.II/dofs/dof_handler.h>
#include <deal.II/dofs/dof_tools.h>
#include <deal.II/fe/fe_q.h>
#include <deal.II/fe/fe_tools.h>
#include <deal.II/numerics/vector_tools.h>
#include <deal.II/base/function.h>

#include <fstream>
#include <iostream>
#include <vector>


void test ()
{
  const unsigned int dim = 2;

  parallel::distributed::Triangulation<dim> tria(MPI_COMM_WORLD);
  GridGenerator::hyper_cube(tria);
  tria.refine_global(3);
  tria.begin_active()->set_refine_flag();
  tria.execute_coarsening_and_refinement();

  FE_Q<dim> fe1(1), fe2(2);
  DoFHandler<dim> dof1(tria), dof2(tria);
  dof1.distribute_dofs(fe1);
  dof2.distribute_dofs(fe2);

  ConstraintMatrix c1, c2;
  DoFTools::make_hanging_node_constraints(dof1, c1);
  c1.close();
  DoFTools::make_hanging_node_constraints(dof2, c2);
  c2.close();

  IndexSet locally_relevant_dofs2;
  DoFTools::extract_locally_relevant_dofs (dof2, locally_relevant_dofs2);

  parallel::distributed::Vector<double>
  v2(dof2.locally_owned_dofs(), locally_relevant_dofs2, MPI_COMM_WORLD),
  v2_interpolated(v2);

  // set first vector to 1
  VectorTools::interpolate(dof2, ConstantFunction<dim>(1.), v2);
  for (unsigned int i=0; i<v2.local_size(); ++i)
    Assert(v2.local_element(i) == 1., ExcInternalError());

  v2.update_ghost_values();
  FETools::back_interpolate(dof2, c2, v2, dof1, c1, v2_interpolated);
  for (unsigned int i=0; i<v2_interpolated.local_size(); ++i)
    Assert(v2_interpolated.local_element(i) == 1., ExcInternalError());
}


int main (int argc, char **argv)
{
  Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv, testing_max_num_threads());

  unsigned int myid = Utilities::MPI::this_mpi_process (MPI_COMM_WORLD);
  deallog.push(Utilities::int_to_string(myid));

  if (myid == 0)
    {
      std::ofstream logfile("output");
      deallog.attach(logfile);
      deallog << std::setprecision(4);
      deallog.threshold_double(1.e-10);

      test();
    }
  else
    test();
}
ESeNonFossiIo/dealii
tests/mpi/parallel_vector_back_interpolate.cc
C++
lgpl-2.1
2,978
<html> <head> <meta charset='utf-8'> <style> .pass { font-weight: bold; color: green; } .fail { font-weight: bold; color: red; } </style> <script> if (window.testRunner) testRunner.dumpAsText(); function SputnikError(message) { this.message = message; } SputnikError.prototype.toString = function () { return 'SputnikError: ' + this.message; }; var sputnikException; function testPrint(msg) { var span = document.createElement("span"); document.getElementById("console").appendChild(span); // insert it first so XHTML knows the namespace span.innerHTML = msg + '<br />'; } function escapeHTML(text) { return text.toString().replace(/&/g, "&amp;").replace(/</g, "&lt;"); } function printTestPassed(msg) { testPrint('<span><span class="pass">PASS</span> ' + escapeHTML(msg) + '</span>'); } function printTestFailed(msg) { testPrint('<span><span class="fail">FAIL</span> ' + escapeHTML(msg) + '</span>'); } function testFailed(msg) { throw new SputnikError(msg); } var successfullyParsed = false; </script> </head> <body> <p>S11.13.2_A4.11_T2.8</p> <div id='console'></div> <script> try { /** * @name: S11.13.2_A4.11_T2.8; * @section: 11.13.2, 11.10.3; * @assertion: The production x |= y is the same as x = x | y; * @description: Type(x) is different from Type(y) and both types vary between Boolean (primitive or object) and Undefined; */ //CHECK#1 x = true; x |= undefined; if (x !== 1) { testFailed('#1: x = true; x |= undefined; x === 1. Actual: ' + (x)); } //CHECK#2 x = undefined; x |= true; if (x !== 1) { testFailed('#2: x = undefined; x |= true; x === 1. Actual: ' + (x)); } //CHECK#3 x = new Boolean(true); x |= undefined; if (x !== 1) { testFailed('#3: x = new Boolean(true); x |= undefined; x === 1. Actual: ' + (x)); } //CHECK#4 x = undefined; x |= new Boolean(true); if (x !== 1) { testFailed('#4: x = undefined; x |= new Boolean(true); x === 1. Actual: ' + (x)); } } catch (ex) { sputnikException = ex; } var successfullyParsed = true; </script> <script> if (!successfullyParsed) printTestFailed('successfullyParsed is not set'); else if (sputnikException) printTestFailed(sputnikException); else printTestPassed(""); testPrint('<br /><span class="pass">TEST COMPLETE</span>'); </script> </body> </html>
youfoh/webkit-efl
LayoutTests/sputnik/Conformance/11_Expressions/11.13_Assignment_Operators/11.13.2_Compound_Assignment/S11.13.2_A4.11_T2.8.html
HTML
lgpl-2.1
2,323
/****************************************************************/
/* MOOSE - Multiphysics Object Oriented Simulation Environment  */
/*                                                              */
/*          All contents are licensed under LGPL V2.1           */
/*             See LICENSE for full restrictions                */
/****************************************************************/

#ifndef POROUSFLOWLINEGEOMETRY_H
#define POROUSFLOWLINEGEOMETRY_H

#include "DiracKernel.h"

class PorousFlowLineGeometry;

template <>
InputParameters validParams<PorousFlowLineGeometry>();

/**
 * Approximates a borehole by a sequence of Dirac Points
 */
class PorousFlowLineGeometry : public DiracKernel
{
public:
  /**
   * Creates a new PorousFlowLineGeometry
   * This reads the file containing the lines of the form
   * weight x y z
   * that defines the line geometry.
   * It also calculates segment-lengths between the points
   */
  PorousFlowLineGeometry(const InputParameters & parameters);

protected:
  /// line length.  This is only used if there is only one borehole point
  const Real _line_length;

  /// line direction.  This is only used if there is only one borehole point
  const RealVectorValue _line_direction;

  /**
   * File defining the geometry of the borehole.  Each row has format
   * weight x y z
   * and the list of such points defines a polyline that is the line sink
   */
  const std::string _point_file;

  /// radii of the borehole
  std::vector<Real> _rs;

  /// x points of the borehole
  std::vector<Real> _xs;

  /// y points of the borehole
  std::vector<Real> _ys;

  /// z points of borehole
  std::vector<Real> _zs;

  /// the bottom point of the borehole (where bottom_pressure is defined)
  Point _bottom_point;

  /// 0.5*(length of polyline segments between points)
  std::vector<Real> _half_seg_len;

  /// Add Dirac Points to the line sink
  virtual void addPoints() override;

  /// reads a space-separated line of floats from ifs and puts in myvec
  bool parseNextLineReals(std::ifstream & ifs, std::vector<Real> & myvec);
};

#endif // POROUSFLOWLINEGEOMETRY_H
Chuban/moose
modules/porous_flow/include/dirackernels/PorousFlowLineGeometry.h
C
lgpl-2.1
2,124
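For orientation, the `_point_file` documented in the PorousFlowLineGeometry.h
header above is described only by its row format, `weight x y z`. A minimal
sketch of such a file follows; the file name and the numeric values are
placeholders, not taken from the MOOSE sources.

```sh
# Hypothetical borehole point file: one "weight x y z" row per polyline point.
cat > borehole_example.bh <<'EOF'
1.0  0.0  0.0  0.0
1.0  0.0  0.0 -1.0
0.5  0.0  0.5 -2.0
EOF
```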
<html> <head> <meta charset='utf-8'> <style> .pass { font-weight: bold; color: green; } .fail { font-weight: bold; color: red; } </style> <script> if (window.testRunner) testRunner.dumpAsText(); function SputnikError(message) { this.message = message; } SputnikError.prototype.toString = function () { return 'SputnikError: ' + this.message; }; var sputnikException; function testPrint(msg) { var span = document.createElement("span"); document.getElementById("console").appendChild(span); // insert it first so XHTML knows the namespace span.innerHTML = msg + '<br />'; } function escapeHTML(text) { return text.toString().replace(/&/g, "&amp;").replace(/</g, "&lt;"); } function printTestPassed(msg) { testPrint('<span><span class="pass">PASS</span> ' + escapeHTML(msg) + '</span>'); } function printTestFailed(msg) { testPrint('<span><span class="fail">FAIL</span> ' + escapeHTML(msg) + '</span>'); } function testFailed(msg) { throw new SputnikError(msg); } var successfullyParsed = false; </script> </head> <body> <p>S15.10.6.2_A1_T10</p> <div id='console'></div> <script> try { /** * @name: S15.10.6.2_A1_T10; * @section: 15.10.6.2; * @assertion: RegExp.prototype.exec(string) Performs a regular expression match of ToString(string) against the regular expression and * returns an Array object containing the results of the match, or null if the string did not match; * @description: String is 1.01 and RegExp is /1|12/; */ __executed = /1|12/.exec(1.01); __expected = ["1"]; __expected.index=0; __expected.input="1.01"; //CHECK#0 if ((__executed instanceof Array) !== true) { testFailed('#0: __executed = /1|12/.exec(1.01); (__executed instanceof Array) === true'); } //CHECK#1 if (__executed.length !== __expected.length) { testFailed('#1: __executed = /1|12/.exec(1.01); __executed.length === ' + __expected.length + '. Actual: ' + __executed.length); } //CHECK#2 if (__executed.index !== __expected.index) { testFailed('#2: __executed = /1|12/.exec(1.01); __executed.index === ' + __expected.index + '. Actual: ' + __executed.index); } //CHECK#3 if (__executed.input !== __expected.input) { testFailed('#3: __executed = /1|12/.exec(1.01); __executed.input === ' + __expected.input + '. Actual: ' + __executed.input); } //CHECK#4 for(var index=0; index<__expected.length; index++) { if (__executed[index] !== __expected[index]) { testFailed('#4: __executed = /1|12/.exec(1.01); __executed[' + index + '] === ' + __expected[index] + '. Actual: ' + __executed[index]); } } } catch (ex) { sputnikException = ex; } var successfullyParsed = true; </script> <script> if (!successfullyParsed) printTestFailed('successfullyParsed is not set'); else if (sputnikException) printTestFailed(sputnikException); else printTestPassed(""); testPrint('<br /><span class="pass">TEST COMPLETE</span>'); </script> </body> </html>
youfoh/webkit-efl
LayoutTests/sputnik/Conformance/15_Native_Objects/15.10_RegExp/15.10.6/15.10.6.2_RegExp.prototype.exec/S15.10.6.2_A1_T10.html
HTML
lgpl-2.1
2,931
// ---------------------------------------------------------------------
//
// Copyright (C) 2005 - 2015 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------


// in symmetric_tensor_06 we have established that contracting with a
// symmetric tensor by hand works as with a full tensor that is stored
// in non-symmetric form. here make sure that we can abbreviate the contraction

#include "../tests.h"
#include <deal.II/base/symmetric_tensor.h>
#include <deal.II/base/tensor.h>


template <int dim>
void test ()
{
  const double lambda = 7, mu = 5;

  SymmetricTensor<4,dim> ts;
  Tensor<4,dim>          ta;
  for (unsigned int i=0; i<dim; ++i)
    for (unsigned int j=0; j<dim; ++j)
      {
        ta[i][j][i][j] += mu;
        ta[i][j][j][i] += mu;
        ta[i][i][j][j] += lambda;
      }
  for (unsigned int i=0; i<dim; ++i)
    for (unsigned int j=0; j<dim; ++j)
      for (unsigned int k=0; k<dim; ++k)
        for (unsigned int l=0; l<dim; ++l)
          ts[i][j][k][l] = ta[i][j][k][l];

  SymmetricTensor<2,dim> as, bs;
  Tensor<2,dim>          aa, ba;

  for (unsigned int i=0; i<dim; ++i)
    for (unsigned int j=0; j<dim; ++j)
      as[i][j] = aa[i][j] = (1. + (i+1)*(j+1));

  bs = ts * as;

  // contract indices 2 <-> 0, 3 <-> 1
  ba = double_contract<2, 0, 3, 1>(ta, aa);

  for (unsigned int i=0; i<dim; ++i)
    for (unsigned int j=0; j<dim; ++j)
      {
        AssertThrow (as[i][j] == aa[i][j], ExcInternalError());
        AssertThrow (bs[i][j] == ba[i][j], ExcInternalError());

        deallog << as[i][j] << ' ' << bs[i][j] << std::endl;
      }

  // test distributivity of multiplication
  AssertThrow ((as*ts)*as == as*(ts*as), ExcInternalError());

  // also test that the elasticity tensor is positive definite
  deallog << as *ts *as << std::endl;
  Assert (as * ts * as > 0, ExcInternalError());
}


int main ()
{
  std::ofstream logfile("output");
  deallog << std::setprecision(3);
  deallog.attach(logfile);

  test<2> ();
  test<3> ();

  deallog << "OK" << std::endl;
}
sairajat/dealii
tests/tensors/symmetric_tensor_07.cc
C++
lgpl-2.1
2,531
// The libMesh Finite Element Library.
// Copyright (C) 2002-2017 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner

// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.

// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


// C++ includes

// Local includes


// ------------------------------------------------------------
// SteadySystem implementation
vikramvgarg/libmesh
src/systems/steady_system.C
C++
lgpl-2.1
986
<html> <head> <meta charset='utf-8'> <style> .pass { font-weight: bold; color: green; } .fail { font-weight: bold; color: red; } </style> <script> if (window.testRunner) testRunner.dumpAsText(); function SputnikError(message) { this.message = message; } SputnikError.prototype.toString = function () { return 'SputnikError: ' + this.message; }; var sputnikException; function testPrint(msg) { var span = document.createElement("span"); document.getElementById("console").appendChild(span); // insert it first so XHTML knows the namespace span.innerHTML = msg + '<br />'; } function escapeHTML(text) { return text.toString().replace(/&/g, "&amp;").replace(/</g, "&lt;"); } function printTestPassed(msg) { testPrint('<span><span class="pass">PASS</span> ' + escapeHTML(msg) + '</span>'); } function printTestFailed(msg) { testPrint('<span><span class="fail">FAIL</span> ' + escapeHTML(msg) + '</span>'); } function testFailed(msg) { throw new SputnikError(msg); } var successfullyParsed = false; </script> </head> <body> <p>S12.8_A6</p> <div id='console'></div> <script> try { /** * @name: S12.8_A6; * @section: 12.8; * @assertion: Appearing of "break" within a function call that is nested in a IterationStatement yields SyntaxError; * @description: Checking if using "break Identifier" within a function body appears to be invalid; * @negative; */ var x=0,y=0; LABEL1 : do { x++; (function(){break LABEL1;})(); y++; } while(0); } catch (ex) { sputnikException = ex; } var successfullyParsed = true; </script> <script> if (!successfullyParsed) printTestPassed('Expected parsing failure'); else if (sputnikException) printTestPassed(sputnikException); else printTestFailed("No error detected"); testPrint('<br /><span class="pass">TEST COMPLETE</span>'); </script> </body> </html>
youfoh/webkit-efl
LayoutTests/sputnik/Conformance/12_Statement/12.8_The_break_Statement/S12.8_A6.html
HTML
lgpl-2.1
1,881
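The negative test above asserts that a break statement may not cross a function boundary, even when the function call is lexically nested inside the labelled loop. Python enforces the same restriction, which gives a compact way to illustrate the rule; this is an analogy only, not part of the Sputnik suite.

import textwrap

# 'break' inside the nested function does not belong to the enclosing loop,
# so compiling this source fails, mirroring the SyntaxError expected above.
src = textwrap.dedent("""
    for i in range(3):
        def inner():
            break
        inner()
""")
try:
    compile(src, "<snippet>", "exec")
except SyntaxError as err:
    print("rejected as expected:", err.msg)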
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class PyPyani(PythonPackage):
    """pyani is a Python3 module that provides support for calculating
    average nucleotide identity (ANI) and related measures for whole genome
    comparisons, and rendering relevant graphical summary output. Where
    available, it takes advantage of multicore systems, and can integrate
    with SGE/OGE-type job schedulers for the sequence comparisons."""

    homepage = "http://widdowquinn.github.io/pyani"
    url = "https://pypi.io/packages/source/p/pyani/pyani-0.2.7.tar.gz"

    version('0.2.7', '239ba630d375a81c35b7c60fb9bec6fa')
    version('0.2.6', 'd5524b9a3c62c36063ed474ea95785c9')

    depends_on('python@3.5:')
    depends_on('py-setuptools', type='build')
    depends_on('py-matplotlib', type=('build', 'run'))
    depends_on('py-seaborn', type=('build', 'run'))
    # Required for ANI analysis
    depends_on('py-biopython', type=('build', 'run'))
    depends_on('py-pandas', type=('build', 'run'))
    depends_on('py-scipy', type=('build', 'run'))
    # Required for ANIb analysis
    depends_on('blast-plus~python', type='run')
    # Required for ANIm analysis
    depends_on('mummer', type='run')
EmreAtes/spack
var/spack/repos/builtin/packages/py-pyani/package.py
Python
lgpl-2.1
2,432
// ---------------------------------------------------------------------
//
// Copyright (C) 2008 - 2015 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------

// a test for WorkStream where we really do write conflicting entries
// into a global vector

#include "../tests.h"
#include <deal.II/base/work_stream.h>
#include <deal.II/lac/vector.h>

Vector<double> result(100);

struct ScratchData
{};

struct CopyData
{
  unsigned int computed;
};

void worker (const std::vector<unsigned int>::iterator &i,
             ScratchData &,
             CopyData &ad)
{
  ad.computed = *i * 2;
}

void copier (const CopyData &ad)
{
  // write into the five elements of 'result' starting at ad.computed%result.size()
  for (unsigned int j=0; j<5; ++j)
    result((ad.computed+j) % result.size()) += ad.computed;
}

void test ()
{
  std::vector<unsigned int> v;
  for (unsigned int i=0; i<200; ++i)
    v.push_back (i);

  WorkStream::run (v.begin(), v.end(), &worker, &copier,
                   ScratchData(), CopyData());

  // now simulate what we should have gotten
  Vector<double> comp(result.size());
  for (unsigned int i=0; i<v.size(); ++i)
    {
      const unsigned int ad_computed = v[i] * 2;
      for (unsigned int j=0; j<5; ++j)
        comp((ad_computed+j) % result.size()) += ad_computed;
    }

  // and compare
  for (unsigned int i=0; i<result.size(); ++i)
    AssertThrow (result(i) == comp(i), ExcInternalError());

  for (unsigned int i=0; i<result.size(); ++i)
    deallog << result(i) << std::endl;
}

int main()
{
  initlog();

  test ();
}
kalj/dealii
tests/base/work_stream_05.cc
C++
lgpl-2.1
2,078
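The WorkStream test above separates a parallel worker (which only fills its own CopyData) from a serial copier (the only code that touches the shared vector), and then checks the result against a sequential recomputation. A rough Python analogy of that worker/copier split is sketched below; it is not deal.II API, just the same pattern expressed with a thread pool.

from concurrent.futures import ThreadPoolExecutor

result = [0.0] * 100

def worker(i):
    # runs in parallel; produces a private "CopyData" value only
    return i * 2

def copier(computed):
    # runs serially in the main thread; sole writer of the shared vector
    for j in range(5):
        result[(computed + j) % len(result)] += computed

with ThreadPoolExecutor() as pool:
    for computed in pool.map(worker, range(200)):  # map yields results in order
        copier(computed)

# sequential recomputation, as in the C++ test
expected = [0.0] * 100
for i in range(200):
    for j in range(5):
        expected[(i * 2 + j) % 100] += i * 2
assert result == expected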
package soot.jimple.validation; import java.util.List; import soot.Body; import soot.SootMethod; import soot.Unit; import soot.jimple.IdentityStmt; import soot.jimple.ParameterRef; import soot.jimple.ThisRef; import soot.util.Chain; import soot.validation.BodyValidator; import soot.validation.ValidationException; public enum IdentityStatementsValidator implements BodyValidator { INSTANCE; public static IdentityStatementsValidator v() { return INSTANCE; } @Override /** * Checks the following invariants on this Jimple body: * <ol> * <li> this-references may only occur in instance methods * <li> this-references may only occur as the first statement in a method, if they occur at all * <li> param-references must precede all statements that are not themselves param-references or this-references, * if they occur at all * </ol> */ public void validate(Body body, List<ValidationException> exception) { SootMethod method = body.getMethod(); if (method.isAbstract()) return; Chain<Unit> units=body.getUnits().getNonPatchingChain(); boolean foundNonThisOrParamIdentityStatement = false; boolean firstStatement = true; for (Unit unit : units) { if(unit instanceof IdentityStmt) { IdentityStmt identityStmt = (IdentityStmt) unit; if(identityStmt.getRightOp() instanceof ThisRef) { if(method.isStatic()) { exception.add(new ValidationException(identityStmt, "@this-assignment in a static method!")); } if(!firstStatement) { exception.add(new ValidationException(identityStmt, "@this-assignment statement should precede all other statements" +"\n method: "+ method)); } } else if(identityStmt.getRightOp() instanceof ParameterRef) { if(foundNonThisOrParamIdentityStatement) { exception.add(new ValidationException(identityStmt, "@param-assignment statements should precede all non-identity statements" +"\n method: "+ method)); } } else { //@caughtexception statement foundNonThisOrParamIdentityStatement = true; } } else { //non-identity statement foundNonThisOrParamIdentityStatement = true; } firstStatement = false; } } @Override public boolean isBasicValidator() { return true; } }
mbenz89/soot
src/soot/jimple/validation/IdentityStatementsValidator.java
Java
lgpl-2.1
2,322
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
<html>
<head>
<script src="resources/SVGTestCase.js"></script>
<script src="../../fast/js/resources/js-test-pre.js"></script>
<script src="../../fast/repaint/resources/repaint.js"></script>
</head>
<body onload="runRepaintTest()">
<h1>SVG 1.1 dynamic update tests</h1>
<p id="description"></p>
<div id="console"></div>
<script src="script-tests/SVGFEDisplacementMapElement-dom-scale-attr.js"></script>
</body>
</html>
youfoh/webkit-efl
LayoutTests/svg/dynamic-updates/SVGFEDisplacementMapElement-dom-scale-attr.html
HTML
lgpl-2.1
465
MODULE=esp_idf_nvs_flash

include $(RIOTBASE)/Makefile.base

CFLAGS += -DESP_PLATFORM

INCLUDES += -I$(RIOTCPU)/$(CPU)/vendor/esp-idf/include/log
INCLUDES += -I$(ESP32_SDK_DIR)/components/nvs_flash/include
INCLUDES += -I$(ESP32_SDK_DIR)/components/spi_flash/include
aabadie/RIOT
cpu/esp32/vendor/esp-idf/nvs_flash/Makefile
Makefile
lgpl-2.1
267
package org.intermine.objectstore.intermine; /* * Copyright (C) 2002-2016 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.sql.Connection; import junit.framework.Test; import org.apache.log4j.Logger; import org.intermine.model.InterMineObject; import org.intermine.model.testmodel.Employee; import org.intermine.objectstore.ObjectStoreException; import org.intermine.objectstore.ObjectStoreWriterFactory; import org.intermine.objectstore.ObjectStoreWriterTestCase; public class ObjectStoreWriterInterMineImplTest extends ObjectStoreWriterTestCase { @SuppressWarnings("unused") private static final Logger LOG = Logger.getLogger(ObjectStoreInterMineImpl.class); public static void oneTimeSetUp() throws Exception { writer = ObjectStoreWriterFactory.getObjectStoreWriter("osw.unittest"); ObjectStoreWriterTestCase.oneTimeSetUp(); } public static void oneTimeTearDown() throws Exception { ObjectStoreWriterTestCase.oneTimeTearDown(); writer.close(); } public ObjectStoreWriterInterMineImplTest(String arg) throws Exception { super(arg); } public static Test suite() { return buildSuite(ObjectStoreWriterInterMineImplTest.class); } /* public static void testLargeQuantitiesOfStuff() throws Exception { StringBuffer sb = new StringBuffer(); for (int i = 0; i < 10000; i++) { sb.append("lkjhaskjfhlsdakf hsdkljf hasdlkf sakf daslhf dskhf ldskf dslkf sdlkf alskf"); } try { { writer.beginTransaction(); //Company cycle[] = new Company[10]; int count = 0; for (int i = 0; i < 1000; i++) { Company fred = (Company) DynamicUtil.createObject(Collections.singleton(Company.class)); //Employee fred = new Employee(); fred.setName(sb.toString() + i); writer.store(fred); count++; //cycle[count % 10] = fred; if ((count % 10) == 0) { LOG.info("Writing Companies - done " + count); } } //cycle = null; writer.commitTransaction(); Connection con = ((ObjectStoreWriterInterMineImpl) writer).getConnection(); con.createStatement().execute("analyse"); ((ObjectStoreWriterInterMineImpl) writer).releaseConnection(con); } Query q = new Query(); //QueryClass qc = new QueryClass(Employee.class); QueryClass qc = new QueryClass(Company.class); q.addFrom(qc); q.addToSelect(qc); q.setConstraint(new SimpleConstraint(new QueryField(qc, "name"), ConstraintOp.MATCHES, new QueryValue("lkjhask%"))); { SingletonResults employees = new SingletonResults(q, writer.getObjectStore(), writer.getObjectStore().getSequence()); employees.setNoExplain(); employees.setBatchSize(20); writer.beginTransaction(); int count = 0; Iterator empIter = employees.iterator(); while (empIter.hasNext()) { Company c = (Company) empIter.next(); //Employee e = (Employee) empIter.next(); //Company c = (Company) DynamicUtil.createObject(Collections.singleton(Company.class)); //c.setName(e.getName()); c.setVatNumber(count); //e.setAge(count); writer.store(c); count++; if ((count % 10) == 0) { LOG.info("Altering Companies - done " + count); } } writer.commitTransaction(); } { SingletonResults employees = new SingletonResults(q, writer.getObjectStore(), writer.getObjectStore().getSequence()); employees.setNoExplain(); employees.setBatchSize(20); writer.beginTransaction(); int count = 0; Iterator empIter = employees.iterator(); while (empIter.hasNext()) { writer.delete((InterMineObject) empIter.next()); count++; if ((count % 10) == 0) { LOG.info("Deleting Companies - done " + 
count); } } //Query q2 = new Query(); //QueryClass qc2 = new QueryClass(Company.class); //q2.addFrom(qc2); //q2.addToSelect(qc2); //q2.setConstraint(new SimpleConstraint(new QueryField(qc2, "name"), ConstraintOp.MATCHES, new QueryValue("Fred %"))); //SingletonResults companies = new SingletonResults(q2, writer.getObjectStore(), writer.getObjectStore().getSequence()); //companies.setNoExplain(); //companies.setBatchSize(20000); //count = 0; //Iterator compIter = companies.iterator(); //while (compIter.hasNext()) { // Company c = (Company) compIter.next(); // writer.delete(c); // count++; // if ((count % 10000) == 0) { // LOG.info("Deleting Employees - done " + count); // } //} writer.commitTransaction(); } } finally { //System.gc(); //System.exit(0); if (writer.isInTransaction()) { writer.abortTransaction(); } //Connection c = ((ObjectStoreWriterInterMineImpl) writer).getConnection(); //c.createStatement().execute("delete from employee"); //c.createStatement().execute("delete from employable"); //c.createStatement().execute("delete from hasaddress"); //c.createStatement().execute("delete from company"); //c.createStatement().execute("delete from hassecretarys"); //c.createStatement().execute("delete from intermineobject"); //c.createStatement().execute("delete from randominterface"); //c.createStatement().execute("delete from thing"); //c.createStatement().execute("vacuum full"); //((ObjectStoreWriterInterMineImpl) writer).releaseConnection(c); } }*/ private boolean finished = false; private Throwable failureException = null; public void testRapidShutdown() throws Exception { Thread t = new Thread(new ShutdownThread()); t.start(); synchronized (this) { try { wait(5000); } catch (InterruptedException e) { } assertTrue(finished); if (failureException != null) { throw new Exception(failureException); } } } public synchronized void signalFinished(Throwable e) { finished = true; failureException = e; notifyAll(); } private class ShutdownThread implements Runnable { public void run() { try { Connection c = null; ObjectStoreWriterInterMineImpl w = (ObjectStoreWriterInterMineImpl) ObjectStoreWriterFactory.getObjectStoreWriter("osw.unittest"); c = w.getConnection(); try { w.close(); fail("Expected an ObjectStoreException"); } catch (ObjectStoreException e) { assertEquals("Closed ObjectStoreWriter while it is being used. Note this writer will be automatically closed when the current operation finishes", e.getMessage()); } w.releaseConnection(c); signalFinished(null); } catch (Throwable e) { System.out.println("Error in ShutdownThread: " + e); signalFinished(e); } } } public void testExceptionOutOfTransaction() throws Exception { assertFalse(writer.isInTransaction()); // First, cause an exception outside a transaction try { writer.store(new Employee() { public Integer getId() { throw new RuntimeException(); } public void setId(Integer id) { throw new RuntimeException(); } }); } catch (Exception e) { } assertFalse(writer.isInTransaction()); // Now try and do something normal. Object o = writer.getObjectById(new Integer(2)); } }
zebrafishmine/intermine
intermine/objectstore/test/src/org/intermine/objectstore/intermine/ObjectStoreWriterInterMineImplTest.java
Java
lgpl-2.1
9,113
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Editres(AutotoolsPackage):
    """Dynamic resource editor for X Toolkit applications."""

    homepage = "http://cgit.freedesktop.org/xorg/app/editres"
    url = "https://www.x.org/archive/individual/app/editres-1.0.6.tar.gz"

    version('1.0.6', '310c504347ca499874593ac96e935353')

    depends_on('libxaw')
    depends_on('libx11')
    depends_on('libxt')
    depends_on('libxmu')

    depends_on('pkgconfig', type='build')
    depends_on('util-macros', type='build')
EmreAtes/spack
var/spack/repos/builtin/packages/editres/package.py
Python
lgpl-2.1
1,739
/*
 * Copyright (c) 2010 Wind River Systems; see
 * guts/COPYRIGHT for information.
 *
 * static struct group *
 * wrap_getgrent(void) {
 *	struct group * rc = NULL;
 */
	static struct group grp;
	static char grbuf[PSEUDO_PWD_MAX];
	int r_rc;

	r_rc = wrap_getgrent_r(&grp, grbuf, PSEUDO_PWD_MAX, &rc);
	/* different error return conventions */
	if (r_rc != 0) {
		errno = r_rc;
	}

/*	return rc;
 * }
 */
incandescant/pseudo
ports/uids_generic/guts/getgrent.c
C
lgpl-2.1
407
/*****************************************************************************
 *
 * This file is part of Mapnik (c++ mapping toolkit)
 *
 * Copyright (C) 2006 Artem Pavlenko
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 *
 *****************************************************************************/

//$Id: image_reader.cpp 17 2005-03-08 23:58:43Z pavlenko $

#include <mapnik/image_reader.hpp>
#include <mapnik/image_util.hpp>
#include <mapnik/factory.hpp>

namespace mapnik
{
typedef factory<image_reader,std::string,
                image_reader* (*)(const std::string&)> ImageReaderFactory;

bool register_image_reader(const std::string& type,image_reader* (* fun)(const std::string&))
{
    return ImageReaderFactory::instance()->register_product(type,fun);
}

image_reader* get_image_reader(const std::string& filename,const std::string& type)
{
    return ImageReaderFactory::instance()->create_object(type,filename);
}

image_reader* get_image_reader(const std::string& filename)
{
    boost::optional<std::string> type = type_from_filename(filename);
    if (type)
    {
        return ImageReaderFactory::instance()->create_object(*type,filename);
    }
    return 0;
}

}
carlos-lopez-garces/mapnik-trunk
src/image_reader.cpp
C++
lgpl-2.1
1,901
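The mapnik translation unit above is a thin registry: register_image_reader stores a reader constructor under a type name, and get_image_reader looks it up, optionally deriving the type from the file name. A minimal sketch of the same lookup idea in Python follows; the names readers, register_reader and get_reader are invented for illustration, and mapnik's real factory is the C++ template shown above.

# Hypothetical miniature of the register/lookup pattern used by image_reader.cpp.
readers = {}

def register_reader(type_name, ctor):
    readers[type_name] = ctor
    return True

def get_reader(filename, type_name=None):
    if type_name is None:
        type_name = filename.rsplit(".", 1)[-1].lower()  # crude type_from_filename
    ctor = readers.get(type_name)
    return ctor(filename) if ctor else None

register_reader("png", lambda path: ("png-reader", path))
print(get_reader("tile.png"))  # ('png-reader', 'tile.png')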
/* Copyright 2011, AUTHORS.txt (http://ui.operamasks.org/about) Dual licensed under the MIT or LGPL Version 2 licenses. */ /** * @fileOverview Undo/Redo system for saving shapshot for document modification * and other recordable changes. */ (function() { OMEDITOR.plugins.add( 'undo', { requires : [ 'selection', 'wysiwygarea' ], init : function( editor ) { var undoManager = new UndoManager( editor ); var undoCommand = editor.addCommand( 'undo', { exec : function() { if ( undoManager.undo() ) { editor.selectionChange(); this.fire( 'afterUndo' ); } }, state : OMEDITOR.TRISTATE_DISABLED, canUndo : false }); var redoCommand = editor.addCommand( 'redo', { exec : function() { if ( undoManager.redo() ) { editor.selectionChange(); this.fire( 'afterRedo' ); } }, state : OMEDITOR.TRISTATE_DISABLED, canUndo : false }); undoManager.onChange = function() { undoCommand.setState( undoManager.undoable() ? OMEDITOR.TRISTATE_OFF : OMEDITOR.TRISTATE_DISABLED ); redoCommand.setState( undoManager.redoable() ? OMEDITOR.TRISTATE_OFF : OMEDITOR.TRISTATE_DISABLED ); }; function recordCommand( event ) { // If the command hasn't been marked to not support undo. if ( undoManager.enabled && event.data.command.canUndo !== false ) undoManager.save(); } // We'll save snapshots before and after executing a command. editor.on( 'beforeCommandExec', recordCommand ); editor.on( 'afterCommandExec', recordCommand ); // Save snapshots before doing custom changes. editor.on( 'saveSnapshot', function() { undoManager.save(); }); // Registering keydown on every document recreation.(#3844) editor.on( 'contentDom', function() { editor.document.on( 'keydown', function( event ) { // Do not capture CTRL hotkeys. if ( !event.data.$.ctrlKey && !event.data.$.metaKey ) undoManager.type( event ); }); }); // Always save an undo snapshot - the previous mode might have // changed editor contents. editor.on( 'beforeModeUnload', function() { editor.mode == 'wysiwyg' && undoManager.save( true ); }); // Make the undo manager available only in wysiwyg mode. editor.on( 'mode', function() { undoManager.enabled = editor.readOnly ? false : editor.mode == 'wysiwyg'; undoManager.onChange(); }); editor.ui.addButton( 'Undo', { label : editor.lang.undo, command : 'undo' }); editor.ui.addButton( 'Redo', { label : editor.lang.redo, command : 'redo' }); editor.resetUndo = function() { // Reset the undo stack. undoManager.reset(); // Create the first image. editor.fire( 'saveSnapshot' ); }; /** * Update the undo stacks with any subsequent DOM changes after this call. * @name OMEDITOR.editor#updateUndo * @example * function() * { * editor.fire( 'updateSnapshot' ); * ... * // Ask to include subsequent (in this call stack) DOM changes to be * // considered as part of the first snapshot. * editor.fire( 'updateSnapshot' ); * editor.document.body.append(...); * ... * } */ editor.on( 'updateSnapshot', function() { if ( undoManager.currentImage && new Image( editor ).equals( undoManager.currentImage ) ) setTimeout( function() { undoManager.update(); }, 0 ); }); } }); OMEDITOR.plugins.undo = {}; /** * Undo snapshot which represents the current document status. * @name OMEDITOR.plugins.undo.Image * @param editor The editor instance on which the image is created. */ var Image = OMEDITOR.plugins.undo.Image = function( editor ) { this.editor = editor; editor.fire( 'beforeUndoImage' ); var contents = editor.getSnapshot(), selection = contents && editor.getSelection(); // In IE, we need to remove the expando attributes. 
OMEDITOR.env.ie && contents && ( contents = contents.replace( /\s+data-cke-expando=".*?"/g, '' ) ); this.contents = contents; this.bookmarks = selection && selection.createBookmarks2( true ); editor.fire( 'afterUndoImage' ); }; // Attributes that browser may changing them when setting via innerHTML. var protectedAttrs = /\b(?:href|src|name)="[^"]*?"/gi; Image.prototype = { equals : function( otherImage, contentOnly ) { var thisContents = this.contents, otherContents = otherImage.contents; // For IE6/7 : Comparing only the protected attribute values but not the original ones.(#4522) if ( OMEDITOR.env.ie && ( OMEDITOR.env.ie7Compat || OMEDITOR.env.ie6Compat ) ) { thisContents = thisContents.replace( protectedAttrs, '' ); otherContents = otherContents.replace( protectedAttrs, '' ); } if ( thisContents != otherContents ) return false; if ( contentOnly ) return true; var bookmarksA = this.bookmarks, bookmarksB = otherImage.bookmarks; if ( bookmarksA || bookmarksB ) { if ( !bookmarksA || !bookmarksB || bookmarksA.length != bookmarksB.length ) return false; for ( var i = 0 ; i < bookmarksA.length ; i++ ) { var bookmarkA = bookmarksA[ i ], bookmarkB = bookmarksB[ i ]; if ( bookmarkA.startOffset != bookmarkB.startOffset || bookmarkA.endOffset != bookmarkB.endOffset || !OMEDITOR.tools.arrayCompare( bookmarkA.start, bookmarkB.start ) || !OMEDITOR.tools.arrayCompare( bookmarkA.end, bookmarkB.end ) ) { return false; } } } return true; } }; /** * @constructor Main logic for Redo/Undo feature. */ function UndoManager( editor ) { this.editor = editor; // Reset the undo stack. this.reset(); } var editingKeyCodes = { /*Backspace*/ 8:1, /*Delete*/ 46:1 }, modifierKeyCodes = { /*Shift*/ 16:1, /*Ctrl*/ 17:1, /*Alt*/ 18:1 }, navigationKeyCodes = { 37:1, 38:1, 39:1, 40:1 }; // Arrows: L, T, R, B UndoManager.prototype = { /** * Process undo system regard keystrikes. * @param {OMEDITOR.dom.event} event */ type : function( event ) { var keystroke = event && event.data.getKey(), isModifierKey = keystroke in modifierKeyCodes, isEditingKey = keystroke in editingKeyCodes, wasEditingKey = this.lastKeystroke in editingKeyCodes, sameAsLastEditingKey = isEditingKey && keystroke == this.lastKeystroke, // Keystrokes which navigation through contents. isReset = keystroke in navigationKeyCodes, wasReset = this.lastKeystroke in navigationKeyCodes, // Keystrokes which just introduce new contents. isContent = ( !isEditingKey && !isReset ), // Create undo snap for every different modifier key. modifierSnapshot = ( isEditingKey && !sameAsLastEditingKey ), // Create undo snap on the following cases: // 1. Just start to type . // 2. Typing some content after a modifier. // 3. Typing some content after make a visible selection. startedTyping = !( isModifierKey || this.typing ) || ( isContent && ( wasEditingKey || wasReset ) ); if ( startedTyping || modifierSnapshot ) { var beforeTypeImage = new Image( this.editor ); // Use setTimeout, so we give the necessary time to the // browser to insert the character into the DOM. OMEDITOR.tools.setTimeout( function() { var currentSnapshot = this.editor.getSnapshot(); // In IE, we need to remove the expando attributes. if ( OMEDITOR.env.ie ) currentSnapshot = currentSnapshot.replace( /\s+data-cke-expando=".*?"/g, '' ); if ( beforeTypeImage.contents != currentSnapshot ) { // It's safe to now indicate typing state. this.typing = true; // This's a special save, with specified snapshot // and without auto 'fireChange'. if ( !this.save( false, beforeTypeImage, false ) ) // Drop future snapshots. 
this.snapshots.splice( this.index + 1, this.snapshots.length - this.index - 1 ); this.hasUndo = true; this.hasRedo = false; this.typesCount = 1; this.modifiersCount = 1; this.onChange(); } }, 0, this ); } this.lastKeystroke = keystroke; // Create undo snap after typed too much (over 25 times). if ( isEditingKey ) { this.typesCount = 0; this.modifiersCount++; if ( this.modifiersCount > 25 ) { this.save( false, null, false ); this.modifiersCount = 1; } } else if ( !isReset ) { this.modifiersCount = 0; this.typesCount++; if ( this.typesCount > 25 ) { this.save( false, null, false ); this.typesCount = 1; } } }, reset : function() // Reset the undo stack. { /** * Remember last pressed key. */ this.lastKeystroke = 0; /** * Stack for all the undo and redo snapshots, they're always created/removed * in consistency. */ this.snapshots = []; /** * Current snapshot history index. */ this.index = -1; this.limit = this.editor.config.undoStackSize || 20; this.currentImage = null; this.hasUndo = false; this.hasRedo = false; this.resetType(); }, /** * Reset all states about typing. * @see UndoManager.type */ resetType : function() { this.typing = false; delete this.lastKeystroke; this.typesCount = 0; this.modifiersCount = 0; }, fireChange : function() { this.hasUndo = !!this.getNextImage( true ); this.hasRedo = !!this.getNextImage( false ); // Reset typing this.resetType(); this.onChange(); }, /** * Save a snapshot of document image for later retrieve. */ save : function( onContentOnly, image, autoFireChange ) { var snapshots = this.snapshots; // Get a content image. if ( !image ) image = new Image( this.editor ); // Do nothing if it was not possible to retrieve an image. if ( image.contents === false ) return false; // Check if this is a duplicate. In such case, do nothing. if ( this.currentImage && image.equals( this.currentImage, onContentOnly ) ) return false; // Drop future snapshots. snapshots.splice( this.index + 1, snapshots.length - this.index - 1 ); // If we have reached the limit, remove the oldest one. if ( snapshots.length == this.limit ) snapshots.shift(); // Add the new image, updating the current index. this.index = snapshots.push( image ) - 1; this.currentImage = image; if ( autoFireChange !== false ) this.fireChange(); return true; }, restoreImage : function( image ) { this.editor.loadSnapshot( image.contents ); if ( image.bookmarks ) this.editor.getSelection().selectBookmarks( image.bookmarks ); else if ( OMEDITOR.env.ie ) { // IE BUG: If I don't set the selection to *somewhere* after setting // document contents, then IE would create an empty paragraph at the bottom // the next time the document is modified. var $range = this.editor.document.getBody().$.createTextRange(); $range.collapse( true ); $range.select(); } this.index = image.index; // Update current image with the actual editor // content, since actualy content may differ from // the original snapshot due to dom change. (#4622) this.update(); this.fireChange(); }, // Get the closest available image. getNextImage : function( isUndo ) { var snapshots = this.snapshots, currentImage = this.currentImage, image, i; if ( currentImage ) { if ( isUndo ) { for ( i = this.index - 1 ; i >= 0 ; i-- ) { image = snapshots[ i ]; if ( !currentImage.equals( image, true ) ) { image.index = i; return image; } } } else { for ( i = this.index + 1 ; i < snapshots.length ; i++ ) { image = snapshots[ i ]; if ( !currentImage.equals( image, true ) ) { image.index = i; return image; } } } } return null; }, /** * Check the current redo state. 
* @return {Boolean} Whether the document has previous state to * retrieve. */ redoable : function() { return this.enabled && this.hasRedo; }, /** * Check the current undo state. * @return {Boolean} Whether the document has future state to restore. */ undoable : function() { return this.enabled && this.hasUndo; }, /** * Perform undo on current index. */ undo : function() { if ( this.undoable() ) { this.save( true ); var image = this.getNextImage( true ); if ( image ) return this.restoreImage( image ), true; } return false; }, /** * Perform redo on current index. */ redo : function() { if ( this.redoable() ) { // Try to save. If no changes have been made, the redo stack // will not change, so it will still be redoable. this.save( true ); // If instead we had changes, we can't redo anymore. if ( this.redoable() ) { var image = this.getNextImage( false ); if ( image ) return this.restoreImage( image ), true; } } return false; }, /** * Update the last snapshot of the undo stack with the current editor content. */ update : function() { this.snapshots.splice( this.index, 1, ( this.currentImage = new Image( this.editor ) ) ); } }; })(); /** * The number of undo steps to be saved. The higher this setting value the more * memory is used for it. * @name OMEDITOR.config.undoStackSize * @type Number * @default 20 * @example * config.undoStackSize = 50; */ /** * Fired when the editor is about to save an undo snapshot. This event can be * fired by plugins and customizations to make the editor saving undo snapshots. * @name OMEDITOR.editor#saveSnapshot * @event */ /** * Fired before an undo image is to be taken. An undo image represents the * editor state at some point. It's saved into an undo store, so the editor is * able to recover the editor state on undo and redo operations. * @name OMEDITOR.editor#beforeUndoImage * @since 3.5.3 * @see OMEDITOR.editor#afterUndoImage * @event */ /** * Fired after an undo image is taken. An undo image represents the * editor state at some point. It's saved into an undo store, so the editor is * able to recover the editor state on undo and redo operations. * @name OMEDITOR.editor#afterUndoImage * @since 3.5.3 * @see OMEDITOR.editor#beforeUndoImage * @event */
yonghuang/fastui
samplecenter/basic/timeTest/operamasks-ui-2.0/development-bundle/ui/editor/_source/plugins/undo/plugin.js
JavaScript
lgpl-3.0
14,545
/* * SonarQube, open source software quality management tool. * Copyright (C) 2008-2014 SonarSource * mailto:contact AT sonarsource DOT com * * SonarQube is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * SonarQube is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package org.sonar.server.computation.step; import org.assertj.core.data.Offset; import org.junit.Rule; import org.junit.Test; import org.sonar.server.computation.batch.TreeRootHolderRule; import org.sonar.server.computation.component.FileAttributes; import org.sonar.server.computation.measure.MeasureRepositoryRule; import org.sonar.server.computation.metric.MetricRepositoryRule; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.guava.api.Assertions.assertThat; import static org.sonar.api.measures.CoreMetrics.SKIPPED_TESTS; import static org.sonar.api.measures.CoreMetrics.SKIPPED_TESTS_KEY; import static org.sonar.api.measures.CoreMetrics.TESTS; import static org.sonar.api.measures.CoreMetrics.TESTS_KEY; import static org.sonar.api.measures.CoreMetrics.TEST_ERRORS; import static org.sonar.api.measures.CoreMetrics.TEST_ERRORS_KEY; import static org.sonar.api.measures.CoreMetrics.TEST_EXECUTION_TIME; import static org.sonar.api.measures.CoreMetrics.TEST_EXECUTION_TIME_KEY; import static org.sonar.api.measures.CoreMetrics.TEST_FAILURES; import static org.sonar.api.measures.CoreMetrics.TEST_FAILURES_KEY; import static org.sonar.api.measures.CoreMetrics.TEST_SUCCESS_DENSITY; import static org.sonar.api.measures.CoreMetrics.TEST_SUCCESS_DENSITY_KEY; import static org.sonar.server.computation.component.Component.Type.DIRECTORY; import static org.sonar.server.computation.component.Component.Type.FILE; import static org.sonar.server.computation.component.Component.Type.MODULE; import static org.sonar.server.computation.component.Component.Type.PROJECT; import static org.sonar.server.computation.component.ReportComponent.builder; import static org.sonar.server.computation.measure.Measure.newMeasureBuilder; import static org.sonar.server.computation.measure.MeasureRepoEntry.entryOf; import static org.sonar.server.computation.measure.MeasureRepoEntry.toEntries; public class ReportUnitTestMeasuresStepTest { private static final Offset<Double> DEFAULT_OFFSET = Offset.offset(0.01d); private static final int ROOT_REF = 1; private static final int MODULE_REF = 12; private static final int SUB_MODULE_REF = 123; private static final int DIRECTORY_REF = 1234; private static final int FILE_1_REF = 12341; private static final int FILE_2_REF = 12342; @Rule public TreeRootHolderRule treeRootHolder = new TreeRootHolderRule() .setRoot( builder(PROJECT, ROOT_REF) .addChildren( builder(MODULE, MODULE_REF) .addChildren( builder(MODULE, SUB_MODULE_REF) .addChildren( builder(DIRECTORY, DIRECTORY_REF) .addChildren( builder(FILE, FILE_1_REF).setFileAttributes(new FileAttributes(true, null)).build(), builder(FILE, 
FILE_2_REF).setFileAttributes(new FileAttributes(true, null)).build()) .build()) .build()) .build()) .build()); @Rule public MetricRepositoryRule metricRepository = new MetricRepositoryRule() .add(TESTS) .add(TEST_ERRORS) .add(TEST_FAILURES) .add(TEST_EXECUTION_TIME) .add(SKIPPED_TESTS) .add(TEST_SUCCESS_DENSITY); @Rule public MeasureRepositoryRule measureRepository = MeasureRepositoryRule.create(treeRootHolder, metricRepository); ComputationStep underTest = new UnitTestMeasuresStep(treeRootHolder, metricRepository, measureRepository); @Test public void aggregate_tests() { checkMeasuresAggregation(TESTS_KEY, 100, 400, 500); } @Test public void aggregate_tests_in_errors() { checkMeasuresAggregation(TEST_ERRORS_KEY, 100, 400, 500); } @Test public void aggregate_tests_in_failures() { checkMeasuresAggregation(TEST_FAILURES_KEY, 100, 400, 500); } @Test public void aggregate_tests_execution_time() { checkMeasuresAggregation(TEST_EXECUTION_TIME_KEY, 100L, 400L, 500L); } @Test public void aggregate_skipped_tests_time() { checkMeasuresAggregation(SKIPPED_TESTS_KEY, 100, 400, 500); } @Test public void compute_test_success_density() { measureRepository.addRawMeasure(FILE_1_REF, TESTS_KEY, newMeasureBuilder().create(10)); measureRepository.addRawMeasure(FILE_2_REF, TESTS_KEY, newMeasureBuilder().create(20)); measureRepository.addRawMeasure(FILE_1_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(2)); measureRepository.addRawMeasure(FILE_2_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(5)); measureRepository.addRawMeasure(FILE_1_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(4)); measureRepository.addRawMeasure(FILE_2_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(1)); underTest.execute(); assertThat(toEntries(measureRepository.getAddedRawMeasures(FILE_1_REF))).contains(entryOf(TEST_SUCCESS_DENSITY_KEY, newMeasureBuilder().create(40d))); assertThat(toEntries(measureRepository.getAddedRawMeasures(FILE_2_REF))).contains(entryOf(TEST_SUCCESS_DENSITY_KEY, newMeasureBuilder().create(70d))); assertThat(toEntries(measureRepository.getAddedRawMeasures(DIRECTORY_REF))).contains(entryOf(TEST_SUCCESS_DENSITY_KEY, newMeasureBuilder().create(60d))); assertThat(toEntries(measureRepository.getAddedRawMeasures(SUB_MODULE_REF))).contains(entryOf(TEST_SUCCESS_DENSITY_KEY, newMeasureBuilder().create(60d))); assertThat(toEntries(measureRepository.getAddedRawMeasures(MODULE_REF))).contains(entryOf(TEST_SUCCESS_DENSITY_KEY, newMeasureBuilder().create(60d))); assertThat(toEntries(measureRepository.getAddedRawMeasures(ROOT_REF))).contains(entryOf(TEST_SUCCESS_DENSITY_KEY, newMeasureBuilder().create(60d))); } @Test public void compute_test_success_density_when_zero_tests_in_errors() { measureRepository.addRawMeasure(FILE_1_REF, TESTS_KEY, newMeasureBuilder().create(10)); measureRepository.addRawMeasure(FILE_2_REF, TESTS_KEY, newMeasureBuilder().create(20)); measureRepository.addRawMeasure(FILE_1_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(0)); measureRepository.addRawMeasure(FILE_2_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(0)); measureRepository.addRawMeasure(FILE_1_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(4)); measureRepository.addRawMeasure(FILE_2_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(1)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(60d); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(95d); 
assertThat(measureRepository.getAddedRawMeasure(DIRECTORY_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(83.3d, DEFAULT_OFFSET); assertThat(measureRepository.getAddedRawMeasure(SUB_MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(83.3d, DEFAULT_OFFSET); assertThat(measureRepository.getAddedRawMeasure(MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(83.3d, DEFAULT_OFFSET); assertThat(measureRepository.getAddedRawMeasure(ROOT_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(83.3d, DEFAULT_OFFSET); } @Test public void compute_test_success_density_when_zero_tests_in_failures() { measureRepository.addRawMeasure(FILE_1_REF, TESTS_KEY, newMeasureBuilder().create(10)); measureRepository.addRawMeasure(FILE_2_REF, TESTS_KEY, newMeasureBuilder().create(20)); measureRepository.addRawMeasure(FILE_1_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(2)); measureRepository.addRawMeasure(FILE_2_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(5)); measureRepository.addRawMeasure(FILE_1_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(0)); measureRepository.addRawMeasure(FILE_2_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(0)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(80d); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(75d); assertThat(measureRepository.getAddedRawMeasure(DIRECTORY_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(76.7d, DEFAULT_OFFSET); assertThat(measureRepository.getAddedRawMeasure(SUB_MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(76.7d, DEFAULT_OFFSET); assertThat(measureRepository.getAddedRawMeasure(MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(76.7d, DEFAULT_OFFSET); assertThat(measureRepository.getAddedRawMeasure(ROOT_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(76.7d, DEFAULT_OFFSET); } @Test public void compute_100_percent_test_success_density_when_no_tests_in_errors_or_failures() { measureRepository.addRawMeasure(FILE_1_REF, TESTS_KEY, newMeasureBuilder().create(10)); measureRepository.addRawMeasure(FILE_2_REF, TESTS_KEY, newMeasureBuilder().create(20)); measureRepository.addRawMeasure(FILE_1_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(0)); measureRepository.addRawMeasure(FILE_2_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(0)); measureRepository.addRawMeasure(FILE_1_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(0)); measureRepository.addRawMeasure(FILE_2_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(0)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(100d); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(100d); assertThat(measureRepository.getAddedRawMeasure(DIRECTORY_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(100d); assertThat(measureRepository.getAddedRawMeasure(SUB_MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(100d); assertThat(measureRepository.getAddedRawMeasure(MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(100d); assertThat(measureRepository.getAddedRawMeasure(ROOT_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(100d); } @Test public void 
compute_0_percent_test_success_density() { measureRepository.addRawMeasure(FILE_1_REF, TESTS_KEY, newMeasureBuilder().create(10)); measureRepository.addRawMeasure(FILE_2_REF, TESTS_KEY, newMeasureBuilder().create(20)); measureRepository.addRawMeasure(FILE_1_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(8)); measureRepository.addRawMeasure(FILE_2_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(15)); measureRepository.addRawMeasure(FILE_1_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(2)); measureRepository.addRawMeasure(FILE_2_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(5)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(0d); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(0d); assertThat(measureRepository.getAddedRawMeasure(DIRECTORY_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(0d); assertThat(measureRepository.getAddedRawMeasure(SUB_MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(0d); assertThat(measureRepository.getAddedRawMeasure(MODULE_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(0d); assertThat(measureRepository.getAddedRawMeasure(ROOT_REF, TEST_SUCCESS_DENSITY_KEY).get().getDoubleValue()).isEqualTo(0d); } @Test public void do_not_compute_test_success_density_when_no_tests_in_errors() { measureRepository.addRawMeasure(FILE_1_REF, TESTS_KEY, newMeasureBuilder().create(10)); measureRepository.addRawMeasure(FILE_2_REF, TESTS_KEY, newMeasureBuilder().create(20)); measureRepository.addRawMeasure(FILE_1_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(4)); measureRepository.addRawMeasure(FILE_2_REF, TEST_FAILURES_KEY, newMeasureBuilder().create(1)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(DIRECTORY_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(SUB_MODULE_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(MODULE_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(ROOT_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); } @Test public void do_not_compute_test_success_density_when_no_tests_in_failure() { measureRepository.addRawMeasure(FILE_1_REF, TESTS_KEY, newMeasureBuilder().create(10)); measureRepository.addRawMeasure(FILE_2_REF, TESTS_KEY, newMeasureBuilder().create(20)); measureRepository.addRawMeasure(FILE_1_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(0)); measureRepository.addRawMeasure(FILE_2_REF, TEST_ERRORS_KEY, newMeasureBuilder().create(0)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(DIRECTORY_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(SUB_MODULE_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(MODULE_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(ROOT_REF, TEST_SUCCESS_DENSITY_KEY)).isAbsent(); } private void 
checkMeasuresAggregation(String metricKey, int file1Value, int file2Value, int expectedValue) { measureRepository.addRawMeasure(FILE_1_REF, metricKey, newMeasureBuilder().create(file1Value)); measureRepository.addRawMeasure(FILE_2_REF, metricKey, newMeasureBuilder().create(file2Value)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, metricKey)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, metricKey)).isAbsent(); assertThat(toEntries(measureRepository.getAddedRawMeasures(DIRECTORY_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); assertThat(toEntries(measureRepository.getAddedRawMeasures(SUB_MODULE_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); assertThat(toEntries(measureRepository.getAddedRawMeasures(MODULE_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); assertThat(toEntries(measureRepository.getAddedRawMeasures(ROOT_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); } private void checkMeasuresAggregation(String metricKey, long file1Value, long file2Value, long expectedValue) { measureRepository.addRawMeasure(FILE_1_REF, metricKey, newMeasureBuilder().create(file1Value)); measureRepository.addRawMeasure(FILE_2_REF, metricKey, newMeasureBuilder().create(file2Value)); underTest.execute(); assertThat(measureRepository.getAddedRawMeasure(FILE_1_REF, metricKey)).isAbsent(); assertThat(measureRepository.getAddedRawMeasure(FILE_2_REF, metricKey)).isAbsent(); assertThat(toEntries(measureRepository.getAddedRawMeasures(DIRECTORY_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); assertThat(toEntries(measureRepository.getAddedRawMeasures(SUB_MODULE_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); assertThat(toEntries(measureRepository.getAddedRawMeasures(MODULE_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); assertThat(toEntries(measureRepository.getAddedRawMeasures(ROOT_REF))).containsOnly(entryOf(metricKey, newMeasureBuilder().create(expectedValue))); } }
mohanaraosv/sonarqube
server/sonar-server/src/test/java/org/sonar/server/computation/step/ReportUnitTestMeasuresStepTest.java
Java
lgpl-3.0
17,146
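The expectations in the step test above exercise the test-success-density measure: the share of unit tests that neither errored nor failed. A small sketch of that arithmetic, checked against the numbers used in compute_test_success_density (file 1: 10 tests, 2 errors, 4 failures; file 2: 20 tests, 5 errors, 1 failure). The helper name below is invented for illustration; the production computation lives in UnitTestMeasuresStep.

def test_success_density(tests, errors, failures):
    # percentage of tests that neither errored nor failed
    return (tests - (errors + failures)) / tests * 100.0

assert test_success_density(10, 2, 4) == 40.0                # FILE_1_REF
assert test_success_density(20, 5, 1) == 70.0                # FILE_2_REF
assert test_success_density(10 + 20, 2 + 5, 4 + 1) == 60.0   # aggregated parents
print("densities match the expected measures")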
/*
 * Copyright (c) 2009 Levente Farkas
 * Copyright (c) 2007, 2008 Wayne Meissner
 *
 * This file is part of gstreamer-java.
 *
 * This code is free software: you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License version 3 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * version 3 for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * version 3 along with this work. If not, see <http://www.gnu.org/licenses/>.
 */

package org.freedesktop.gstreamer.lowlevel;

import org.freedesktop.gstreamer.GhostPad;
import org.freedesktop.gstreamer.Pad;
import org.freedesktop.gstreamer.PadTemplate;
import org.freedesktop.gstreamer.lowlevel.annotations.CallerOwnsReturn;

import com.sun.jna.Pointer;

/**
 * GstGhostPad functions
 */
public interface GstGhostPadAPI extends com.sun.jna.Library {
    GstGhostPadAPI GSTGHOSTPAD_API = GstNative.load(GstGhostPadAPI.class);

    GType gst_ghost_pad_get_type();

    @CallerOwnsReturn Pointer ptr_gst_ghost_pad_new(String name, Pad target);
    @CallerOwnsReturn Pointer ptr_gst_ghost_pad_new_no_target(String name, int direction);
    @CallerOwnsReturn Pointer ptr_gst_ghost_pad_new_from_template(String name, Pad target, PadTemplate templ);
    @CallerOwnsReturn Pointer ptr_gst_ghost_pad_new_no_target_from_template(String name, PadTemplate templ);

    @CallerOwnsReturn GhostPad gst_ghost_pad_new(String name, Pad target);
    @CallerOwnsReturn GhostPad gst_ghost_pad_new_no_target(String name, int direction);
    @CallerOwnsReturn GhostPad gst_ghost_pad_new_from_template(String name, Pad target, PadTemplate templ);
    @CallerOwnsReturn GhostPad gst_ghost_pad_new_no_target_from_template(String name, PadTemplate templ);

    @CallerOwnsReturn Pad gst_ghost_pad_get_target(GhostPad gpad);
    boolean gst_ghost_pad_set_target(GhostPad gpad, Pad newtarget);
}
isaacrj/gst1-java-core
src/org/freedesktop/gstreamer/lowlevel/GstGhostPadAPI.java
Java
lgpl-3.0
2,152
-- Revert rename_clia_to_cle_for_analysis_projects

BEGIN;

ALTER TABLE config.analysis_project DROP COLUMN is_cle;

COMMIT;
apregier/genome
sqitch/gms/revert/rename_clia_to_cle_for_analysis_projects.sql
SQL
lgpl-3.0
125
# This program is free software; you can redistribute it and/or modify # it under the terms of the (LGPL) GNU Lesser General Public License as # published by the Free Software Foundation; either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library Lesser General Public License for more details at # ( http://www.gnu.org/licenses/lgpl.html ). # # You should have received a copy of the GNU Lesser General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # written by: Jeff Ortel ( jortel@redhat.com ) """ The I{builder} module provides an wsdl/xsd defined types factory """ from logging import getLogger from suds import * from suds.sudsobject import Factory log = getLogger(__name__) class Builder: """ Builder used to construct an object for types defined in the schema """ def __init__(self, resolver): """ @param resolver: A schema object name resolver. @type resolver: L{resolver.Resolver} """ self.resolver = resolver def build(self, name): """ build a an object for the specified typename as defined in the schema """ if isinstance(name, str): type = self.resolver.find(name) if type is None: raise TypeNotFound(name) else: type = name cls = type.name if type.mixed(): data = Factory.property(cls) else: data = Factory.object(cls) resolved = type.resolve() md = data.__metadata__ md.sxtype = resolved md.ordering = self.ordering(resolved) history = [] self.add_attributes(data, resolved) for child, ancestry in type.children(): if self.skip_child(child, ancestry): continue self.process(data, child, history[:]) return data def process(self, data, type, history): """ process the specified type then process its children """ if type in history: return if type.enum(): return history.append(type) resolved = type.resolve() value = None if type.unbounded(): value = [] else: if len(resolved) > 0: if resolved.mixed(): value = Factory.property(resolved.name) md = value.__metadata__ md.sxtype = resolved else: value = Factory.object(resolved.name) md = value.__metadata__ md.sxtype = resolved md.ordering = self.ordering(resolved) setattr(data, type.name, value) if value is not None: data = value if not isinstance(data, list): self.add_attributes(data, resolved) for child, ancestry in resolved.children(): if self.skip_child(child, ancestry): continue self.process(data, child, history[:]) def add_attributes(self, data, type): """ add required attributes """ for attr, ancestry in type.attributes(): name = '_%s' % attr.name value = attr.get_default() setattr(data, name, value) def skip_child(self, child, ancestry): """ get whether or not to skip the specified child """ if child.any(): return True for x in ancestry: if x.choice(): return True return False def ordering(self, type): """ get the ordering """ result = [] for child, ancestry in type.resolve(): name = child.name if child.name is None: continue if child.isattr(): name = '_%s' % child.name result.append(name) return result
USGM/suds
suds/builder.py
Python
lgpl-3.0
4,213
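Builder.build in the suds entry above resolves a schema type by name and returns a suds object with its attributes and ordering metadata filled in. In normal use it is not called directly; it sits behind the client's factory. A hedged usage sketch follows: the WSDL URL and the Person type name are placeholders, and the note that factory.create goes through the resolver/Builder machinery reflects the module's role as described in its docstring rather than a guaranteed call path.

from suds.client import Client

# Placeholder WSDL; any service exposing a complex type named "Person" works.
client = Client("http://example.com/service?wsdl")

# factory.create builds an empty, schema-shaped object for the named type,
# which can then be populated and passed to a service operation.
person = client.factory.create("Person")
person.name = "Ada"
print(person)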
/* * Ample SDK - JavaScript GUI Framework * * Copyright (c) 2012 Sergey Ilinsky, Zingus J. Rinkle * Dual licensed under the MIT and GPL licenses. * See: http://www.amplesdk.com/about/licensing/ * */ ample.locale.addCultureInfo("it", { messages: { // window/dialog/wizard "xul.dialog.button.accept": "OK", "xul.dialog.button.cancel": "Annulla", "xul.dialog.button.close": "Chiudi", "xul.dialog.button.help": "Aiuto", "xul.dialog.button.finish": "Fine", "xul.dialog.button.next": "Prossimo", "xul.dialog.button.previous": "Precedente", // editor "xul.editor.button.undo": "Annulla Inserimento", "xul.editor.button.redo": "Ripristina Inserimento", "xul.editor.button.justifyleft": "Allinea Testo a Sinistra", "xul.editor.button.justifycenter": "Allinea Testo al Centro", "xul.editor.button.justifyright": "Allinea Text a Destra", "xul.editor.button.justifyfull": "Allineamento giustificato", "xul.editor.button.outdent": "Diminuisci l'Indentazione del Paragrafo", "xul.editor.button.indent": "Aumenta l'Indentazione del Paragrafo", "xul.editor.button.insertunorderedlist":"Inizia una Lista puntata", "xul.editor.button.insertorderedlist": "Inizia una Lista numerata", "xul.editor.button.createlink": "Crea un Link", "xul.editor.button.unlink": "Rimuovi il Link", "xul.editor.button.bold": "Rendi grassetto il Testo selezionato", "xul.editor.button.italic": "Rendi corsivo il Testo selezionato", "xul.editor.button.underline": "Sottolinea il Testo selezionato", "xul.editor.button.strikethrough": "Sbarra il Testo selezionato", "xul.editor.button.subscript": "Sposta il Testo in alto (Apice)", "xul.editor.button.superscript": "Sposta il Testo in basso (Pedice)", "xul.editor.button.fontsize": "Cambia la Dimensione del Carattere", "xul.editor.button.fontname": "Cambia il Tipo di Carattere", "xul.editor.button.formatblock": "Formattazione del Paragrafo", "xul.editor.button.forecolor": "Cambia il colore del testo", "xul.editor.button.backcolor": "Cambia il colore di sfondo del testo", // Stupid text (please keep it for RCS and later editions) "{":"}" } });
zonebuilder/zonebuilder.github.io
jul/ample-sdk/ample/languages/xul/locales/it.js
JavaScript
lgpl-3.0
2,266
/*
 *  Copyright 2001-2006 Stephen Colebourne
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.joda.time;

/**
 * Exception thrown when attempting to set a field outside its supported range.
 *
 * @author Brian S O'Neill
 * @since 1.1
 */
public class IllegalFieldValueException extends IllegalArgumentException {

    /** Serialization lock. */
    private static final long serialVersionUID = 6305711765985447737L;

    /**
     * Creates a message for the exception.
     *
     * @param fieldName  the field name
     * @param value  the value rejected
     * @param lowerBound  the lower bound allowed
     * @param upperBound  the upper bound allowed
     * @param explain  an explanation
     * @return the message
     */
    private static String createMessage(String fieldName, Number value,
                                        Number lowerBound, Number upperBound, String explain) {
        StringBuffer buf = new StringBuffer()
            .append("Value ").append(value).append(" for ").append(fieldName).append(' ');

        if (lowerBound == null) {
            if (upperBound == null) {
                buf.append("is not supported");
            } else {
                buf.append("must not be larger than ").append(upperBound);
            }
        } else if (upperBound == null) {
            buf.append("must not be smaller than ").append(lowerBound);
        } else {
            buf.append("must be in the range [")
                .append(lowerBound)
                .append(',')
                .append(upperBound)
                .append(']');
        }
        if (explain != null) {
            buf.append(": ").append(explain);
        }
        return buf.toString();
    }

    /**
     * Creates a message for the exception.
     *
     * @param fieldName  the field name
     * @param value  the value rejected
     * @return the message
     */
    private static String createMessage(String fieldName, String value) {
        StringBuffer buf = new StringBuffer().append("Value ");

        if (value == null) {
            buf.append("null");
        } else {
            buf.append('"');
            buf.append(value);
            buf.append('"');
        }

        buf.append(" for ").append(fieldName).append(' ').append("is not supported");

        return buf.toString();
    }

    private final DateTimeFieldType iDateTimeFieldType;
    private final DurationFieldType iDurationFieldType;
    private final String iFieldName;
    private final Number iNumberValue;
    private final String iStringValue;
    private final Number iLowerBound;
    private final Number iUpperBound;
    private String iMessage;

    /**
     * Constructor.
     *
     * @param fieldType  type of field being set
     * @param value  illegal value being set
     * @param lowerBound  lower legal field value, or null if not applicable
     * @param upperBound  upper legal field value, or null if not applicable
     */
    public IllegalFieldValueException(DateTimeFieldType fieldType,
                                      Number value, Number lowerBound, Number upperBound) {
        super(createMessage(fieldType.getName(), value, lowerBound, upperBound, null));
        iDateTimeFieldType = fieldType;
        iDurationFieldType = null;
        iFieldName = fieldType.getName();
        iNumberValue = value;
        iStringValue = null;
        iLowerBound = lowerBound;
        iUpperBound = upperBound;
        iMessage = super.getMessage();
    }

    /**
     * Constructor.
* * @param fieldType type of field being set * @param value illegal value being set * @param explain an explanation * @since 1.5 */ public IllegalFieldValueException(DateTimeFieldType fieldType, Number value, String explain) { super(createMessage(fieldType.getName(), value, null, null, explain)); iDateTimeFieldType = fieldType; iDurationFieldType = null; iFieldName = fieldType.getName(); iNumberValue = value; iStringValue = null; iLowerBound = null; iUpperBound = null; iMessage = super.getMessage(); } /** * Constructor. * * @param fieldType type of field being set * @param value illegal value being set * @param lowerBound lower legal field value, or null if not applicable * @param upperBound upper legal field value, or null if not applicable */ public IllegalFieldValueException(DurationFieldType fieldType, Number value, Number lowerBound, Number upperBound) { super(createMessage(fieldType.getName(), value, lowerBound, upperBound, null)); iDateTimeFieldType = null; iDurationFieldType = fieldType; iFieldName = fieldType.getName(); iNumberValue = value; iStringValue = null; iLowerBound = lowerBound; iUpperBound = upperBound; iMessage = super.getMessage(); } /** * Constructor. * * @param fieldName name of field being set * @param value illegal value being set * @param lowerBound lower legal field value, or null if not applicable * @param upperBound upper legal field value, or null if not applicable */ public IllegalFieldValueException(String fieldName, Number value, Number lowerBound, Number upperBound) { super(createMessage(fieldName, value, lowerBound, upperBound, null)); iDateTimeFieldType = null; iDurationFieldType = null; iFieldName = fieldName; iNumberValue = value; iStringValue = null; iLowerBound = lowerBound; iUpperBound = upperBound; iMessage = super.getMessage(); } /** * Constructor. * * @param fieldType type of field being set * @param value illegal value being set */ public IllegalFieldValueException(DateTimeFieldType fieldType, String value) { super(createMessage(fieldType.getName(), value)); iDateTimeFieldType = fieldType; iDurationFieldType = null; iFieldName = fieldType.getName(); iStringValue = value; iNumberValue = null; iLowerBound = null; iUpperBound = null; iMessage = super.getMessage(); } /** * Constructor. * * @param fieldType type of field being set * @param value illegal value being set */ public IllegalFieldValueException(DurationFieldType fieldType, String value) { super(createMessage(fieldType.getName(), value)); iDateTimeFieldType = null; iDurationFieldType = fieldType; iFieldName = fieldType.getName(); iStringValue = value; iNumberValue = null; iLowerBound = null; iUpperBound = null; iMessage = super.getMessage(); } /** * Constructor. * * @param fieldName name of field being set * @param value illegal value being set */ public IllegalFieldValueException(String fieldName, String value) { super(createMessage(fieldName, value)); iDateTimeFieldType = null; iDurationFieldType = null; iFieldName = fieldName; iStringValue = value; iNumberValue = null; iLowerBound = null; iUpperBound = null; iMessage = super.getMessage(); } //----------------------------------------------------------------------- /** * Returns the DateTimeFieldType whose value was invalid, or null if not applicable. * * @return the datetime field type */ public DateTimeFieldType getDateTimeFieldType() { return iDateTimeFieldType; } /** * Returns the DurationFieldType whose value was invalid, or null if not applicable. 
     *
     * @return the duration field type
     */
    public DurationFieldType getDurationFieldType() {
        return iDurationFieldType;
    }

    /**
     * Returns the name of the field whose value was invalid.
     *
     * @return the field name
     */
    public String getFieldName() {
        return iFieldName;
    }

    /**
     * Returns the illegal integer value assigned to the field, or null if not applicable.
     *
     * @return the value
     */
    public Number getIllegalNumberValue() {
        return iNumberValue;
    }

    /**
     * Returns the illegal string value assigned to the field, or null if not applicable.
     *
     * @return the value
     */
    public String getIllegalStringValue() {
        return iStringValue;
    }

    /**
     * Returns the illegal value assigned to the field as a non-null string.
     *
     * @return the value
     */
    public String getIllegalValueAsString() {
        String value = iStringValue;
        if (value == null) {
            value = String.valueOf(iNumberValue);
        }
        return value;
    }

    /**
     * Returns the lower bound of the legal value range, or null if not applicable.
     *
     * @return the lower bound
     */
    public Number getLowerBound() {
        return iLowerBound;
    }

    /**
     * Returns the upper bound of the legal value range, or null if not applicable.
     *
     * @return the upper bound
     */
    public Number getUpperBound() {
        return iUpperBound;
    }

    public String getMessage() {
        return iMessage;
    }

    /**
     * Provide additional detail by prepending a message to the existing message.
     * A colon separator is automatically inserted between the messages.
     * @since 1.3
     */
    public void prependMessage(String message) {
        if (iMessage == null) {
            iMessage = message;
        } else if (message != null) {
            iMessage = message + ": " + iMessage;
        }
    }
}
syntelos/gap-data
types/lib/joda-time/src/org/joda/time/IllegalFieldValueException.java
Java
lgpl-3.0
10,401
642
kfazolin/biblia
language/pt_br/bible/p_isaiah_14_14.html
HTML
unlicense
3
# coding: utf-8 from __future__ import unicode_literals import re import calendar import datetime from .common import InfoExtractor from ..compat import compat_str from ..utils import ( HEADRequest, unified_strdate, strip_jsonp, int_or_none, float_or_none, determine_ext, remove_end, unescapeHTML, ) class ORFTVthekIE(InfoExtractor): IE_NAME = 'orf:tvthek' IE_DESC = 'ORF TVthek' _VALID_URL = r'https?://tvthek\.orf\.at/(?:[^/]+/)+(?P<id>\d+)' _TESTS = [{ 'url': 'http://tvthek.orf.at/program/Aufgetischt/2745173/Aufgetischt-Mit-der-Steirischen-Tafelrunde/8891389', 'playlist': [{ 'md5': '2942210346ed779588f428a92db88712', 'info_dict': { 'id': '8896777', 'ext': 'mp4', 'title': 'Aufgetischt: Mit der Steirischen Tafelrunde', 'description': 'md5:c1272f0245537812d4e36419c207b67d', 'duration': 2668, 'upload_date': '20141208', }, }], 'skip': 'Blocked outside of Austria / Germany', }, { 'url': 'http://tvthek.orf.at/topic/Im-Wandel-der-Zeit/8002126/Best-of-Ingrid-Thurnher/7982256', 'info_dict': { 'id': '7982259', 'ext': 'mp4', 'title': 'Best of Ingrid Thurnher', 'upload_date': '20140527', 'description': 'Viele Jahre war Ingrid Thurnher das "Gesicht" der ZIB 2. Vor ihrem Wechsel zur ZIB 2 im Jahr 1995 moderierte sie unter anderem "Land und Leute", "Österreich-Bild" und "Niederösterreich heute".', }, 'params': { 'skip_download': True, # rtsp downloads }, '_skip': 'Blocked outside of Austria / Germany', }, { 'url': 'http://tvthek.orf.at/topic/Fluechtlingskrise/10463081/Heimat-Fremde-Heimat/13879132/Senioren-betreuen-Migrantenkinder/13879141', 'skip_download': True, }, { 'url': 'http://tvthek.orf.at/profile/Universum/35429', 'skip_download': True, }] def _real_extract(self, url): playlist_id = self._match_id(url) webpage = self._download_webpage(url, playlist_id) data_jsb = self._parse_json( self._search_regex( r'<div[^>]+class=(["\']).*?VideoPlaylist.*?\1[^>]+data-jsb=(["\'])(?P<json>.+?)\2', webpage, 'playlist', group='json'), playlist_id, transform_source=unescapeHTML)['playlist']['videos'] def quality_to_int(s): m = re.search('([0-9]+)', s) if m is None: return -1 return int(m.group(1)) entries = [] for sd in data_jsb: video_id, title = sd.get('id'), sd.get('title') if not video_id or not title: continue video_id = compat_str(video_id) formats = [{ 'preference': -10 if fd['delivery'] == 'hls' else None, 'format_id': '%s-%s-%s' % ( fd['delivery'], fd['quality'], fd['quality_string']), 'url': fd['src'], 'protocol': fd['protocol'], 'quality': quality_to_int(fd['quality']), } for fd in sd['sources']] # Check for geoblocking. # There is a property is_geoprotection, but that's always false geo_str = sd.get('geoprotection_string') if geo_str: try: http_url = next( f['url'] for f in formats if re.match(r'^https?://.*\.mp4$', f['url'])) except StopIteration: pass else: req = HEADRequest(http_url) self._request_webpage( req, video_id, note='Testing for geoblocking', errnote=(( 'This video seems to be blocked outside of %s. 
' 'You may want to try the streaming-* formats.') % geo_str), fatal=False) self._check_formats(formats, video_id) self._sort_formats(formats) subtitles = {} for sub in sd.get('subtitles', []): sub_src = sub.get('src') if not sub_src: continue subtitles.setdefault(sub.get('lang', 'de-AT'), []).append({ 'url': sub_src, }) upload_date = unified_strdate(sd.get('created_date')) entries.append({ '_type': 'video', 'id': video_id, 'title': title, 'formats': formats, 'subtitles': subtitles, 'description': sd.get('description'), 'duration': int_or_none(sd.get('duration_in_seconds')), 'upload_date': upload_date, 'thumbnail': sd.get('image_full_url'), }) return { '_type': 'playlist', 'entries': entries, 'id': playlist_id, } class ORFOE1IE(InfoExtractor): IE_NAME = 'orf:oe1' IE_DESC = 'Radio Österreich 1' _VALID_URL = r'https?://oe1\.orf\.at/(?:programm/|konsole\?.*?\btrack_id=)(?P<id>[0-9]+)' # Audios on ORF radio are only available for 7 days, so we can't add tests. _TESTS = [{ 'url': 'http://oe1.orf.at/konsole?show=on_demand#?track_id=394211', 'only_matching': True, }, { 'url': 'http://oe1.orf.at/konsole?show=ondemand&track_id=443608&load_day=/programm/konsole/tag/20160726', 'only_matching': True, }] def _real_extract(self, url): show_id = self._match_id(url) data = self._download_json( 'http://oe1.orf.at/programm/%s/konsole' % show_id, show_id ) timestamp = datetime.datetime.strptime('%s %s' % ( data['item']['day_label'], data['item']['time'] ), '%d.%m.%Y %H:%M') unix_timestamp = calendar.timegm(timestamp.utctimetuple()) return { 'id': show_id, 'title': data['item']['title'], 'url': data['item']['url_stream'], 'ext': 'mp3', 'description': data['item'].get('info'), 'timestamp': unix_timestamp } class ORFFM4IE(InfoExtractor): IE_NAME = 'orf:fm4' IE_DESC = 'radio FM4' _VALID_URL = r'https?://fm4\.orf\.at/(?:7tage/?#|player/)(?P<date>[0-9]+)/(?P<show>\w+)' _TEST = { 'url': 'http://fm4.orf.at/player/20160110/IS/', 'md5': '01e736e8f1cef7e13246e880a59ad298', 'info_dict': { 'id': '2016-01-10_2100_tl_54_7DaysSun13_11244', 'ext': 'mp3', 'title': 'Im Sumpf', 'description': 'md5:384c543f866c4e422a55f66a62d669cd', 'duration': 7173, 'timestamp': 1452456073, 'upload_date': '20160110', }, 'skip': 'Live streams on FM4 got deleted soon', } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) show_date = mobj.group('date') show_id = mobj.group('show') data = self._download_json( 'http://audioapi.orf.at/fm4/json/2.0/broadcasts/%s/4%s' % (show_date, show_id), show_id ) def extract_entry_dict(info, title, subtitle): return { 'id': info['loopStreamId'].replace('.mp3', ''), 'url': 'http://loopstream01.apa.at/?channel=fm4&id=%s' % info['loopStreamId'], 'title': title, 'description': subtitle, 'duration': (info['end'] - info['start']) / 1000, 'timestamp': info['start'] / 1000, 'ext': 'mp3' } entries = [extract_entry_dict(t, data['title'], data['subtitle']) for t in data['streams']] return { '_type': 'playlist', 'id': show_id, 'title': data['title'], 'description': data['subtitle'], 'entries': entries } class ORFIPTVIE(InfoExtractor): IE_NAME = 'orf:iptv' IE_DESC = 'iptv.ORF.at' _VALID_URL = r'https?://iptv\.orf\.at/(?:#/)?stories/(?P<id>\d+)' _TEST = { 'url': 'http://iptv.orf.at/stories/2275236/', 'md5': 'c8b22af4718a4b4af58342529453e3e5', 'info_dict': { 'id': '350612', 'ext': 'flv', 'title': 'Weitere Evakuierungen um Vulkan Calbuco', 'description': 'md5:d689c959bdbcf04efeddedbf2299d633', 'duration': 68.197, 'thumbnail': 're:^https?://.*\.jpg$', 'upload_date': '20150425', }, } def _real_extract(self, url): 
story_id = self._match_id(url) webpage = self._download_webpage( 'http://iptv.orf.at/stories/%s' % story_id, story_id) video_id = self._search_regex( r'data-video(?:id)?="(\d+)"', webpage, 'video id') data = self._download_json( 'http://bits.orf.at/filehandler/static-api/json/current/data.json?file=%s' % video_id, video_id)[0] duration = float_or_none(data['duration'], 1000) video = data['sources']['default'] load_balancer_url = video['loadBalancerUrl'] abr = int_or_none(video.get('audioBitrate')) vbr = int_or_none(video.get('bitrate')) fps = int_or_none(video.get('videoFps')) width = int_or_none(video.get('videoWidth')) height = int_or_none(video.get('videoHeight')) thumbnail = video.get('preview') rendition = self._download_json( load_balancer_url, video_id, transform_source=strip_jsonp) f = { 'abr': abr, 'vbr': vbr, 'fps': fps, 'width': width, 'height': height, } formats = [] for format_id, format_url in rendition['redirect'].items(): if format_id == 'rtmp': ff = f.copy() ff.update({ 'url': format_url, 'format_id': format_id, }) formats.append(ff) elif determine_ext(format_url) == 'f4m': formats.extend(self._extract_f4m_formats( format_url, video_id, f4m_id=format_id)) elif determine_ext(format_url) == 'm3u8': formats.extend(self._extract_m3u8_formats( format_url, video_id, 'mp4', m3u8_id=format_id)) else: continue self._sort_formats(formats) title = remove_end(self._og_search_title(webpage), ' - iptv.ORF.at') description = self._og_search_description(webpage) upload_date = unified_strdate(self._html_search_meta( 'dc.date', webpage, 'upload date')) return { 'id': video_id, 'title': title, 'description': description, 'duration': duration, 'thumbnail': thumbnail, 'upload_date': upload_date, 'formats': formats, }
TRox1972/youtube-dl
youtube_dl/extractor/orf.py
Python
unlicense
11,297
html, body, div, span, applet, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, pre, a, abbr, acronym, address, big, cite, code, del, dfn, em, img, ins, kbd, q, s, samp, small, strike, strong, sub, sup, tt, var, b, u, i, center, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td, article, aside, canvas, details, embed, figure, figcaption, footer, header, hgroup, menu, nav, output, ruby, section, summary, time, mark, audio, video { margin: 0; padding: 0; border: 0; font: inherit; font-size: 100%; vertical-align: baseline; font-family: "PT Serif",Georgia,Times,"Times New Roman",serif; } html { line-height: 1 } ol, ul { list-style: none } table { border-collapse: collapse; border-spacing: 0; } caption, th, td { text-align: left; font-weight: normal; vertical-align: middle; } q, blockquote { quotes: none } q:before, q:after, blockquote:before, blockquote:after { content: ""; content: none } a img { border: none } article, aside, details, figcaption, figure, footer, header, hgroup, menu, nav, section, summary { display: block } a { color: #1863a1 } a:visited { color: #751590 } a:focus { color: #0181eb } a:hover { color: #0181eb } a:active { color: #01579f } aside.sidebar a { color: #222 } aside.sidebar a:focus { color: #0181eb } aside.sidebar a:hover { color: #0181eb } aside.sidebar a:active { color: #01579f } a { -webkit-transition: color 0.3s; -moz-transition: color 0.3s; -o-transition: color 0.3s; transition: color 0.3s } html { background: #252525 top left } body>div { background: #f2f2f2 top left; border-bottom: 1px solid #bfbfbf } body>div>div { background: #f8f8f8 top left; border-right: 1px solid #e0e0e0 } /*.heading, body>header h1, h1, h2, h3, h4, h5, h6 { font-family: "PT Serif", "Georgia", "Helvetica Neue", Arial, sans-serif }*/ .sans, body>header h2, article header p.meta, article>footer, #content .blog-index footer, /*html .gist .gist-file .gist-meta,*/ #blog-archives a.category, #blog-archives time, aside.sidebar section, body>footer { font-family: "PT Sans", "Helvetica Neue", Arial, sans-serif } .serif, body, #content .blog-index a[rel=full-article] { /* font-family: "PT Serif", Georgia, Times, "Times New Roman", serif */ } .mono, pre, code, tt, p code, li code { font-family: Menlo, Monaco, "Andale Mono", "lucida console", "Courier New", monospace; } body>header h1 { font-size: 2.2em; font-family: "PT Serif", "Georgia", "Helvetica Neue", Arial, sans-serif; font-weight: normal; line-height: 1.2em; margin-bottom: 0.6667em } body>header h2 { font-family: "PT Serif", "Georgia", "Helvetica Neue", Arial, sans-serif } body { line-height: 1.5em; color: #222 } h1 { font-size: 2.2em; line-height: 1.2em } /* @media only screen and (min-width: 992px) { body { font-size: 1.15em } h1 { font-size: 2.6em; line-height: 1.2em } } */ h1, h2, h3, h4, h5, h6 { text-rendering: optimizelegibility; margin-bottom: 1em; font-weight: bold; margin-top: 1em; } h2, section h1 { font-size: 1.5em } h3, section h2, section section h1 { font-size: 1.3em } h4, section h3, section section h2, section section section h1 { font-size: 1em } h5, section h4, section section h3 { font-size: .9em } h6, section h5, section section h4, section section section h3 { font-size: .8em } p, article blockquote, ul, ol { margin-bottom: 0.8em; /* changed this from 1.5em to 0.8 */ } ul { list-style-type: disc } ul ul { list-style-type: circle; margin-bottom: 0px } ul ul ul { list-style-type: square; margin-bottom: 0px } ol { list-style-type: decimal } ol ol { list-style-type: 
lower-alpha; margin-bottom: 0px } ol ol ol { list-style-type: lower-roman; margin-bottom: 0px } ul, ul ul, ul ol, ol, ol ul, ol ol { margin-left: 1.3em } ul ul, ul ol, ol ul, ol ol { margin-bottom: 0em } strong { font-weight: bold } em { font-style: italic } sup, sub { font-size: 0.75em; position: relative; display: inline-block; padding: 0 .2em; line-height: .8em } sup { top: -.5em } sub { bottom: -.5em } a[rev='footnote'] { font-size: .75em; padding: 0 .3em; line-height: 1 } q { font-style: italic } q:before { content: "\201C" } q:after { content: "\201D" } em, dfn { font-style: italic } strong, dfn { font-weight: bold } del, s { text-decoration: line-through } abbr, acronym { border-bottom: 1px dotted; cursor: help } hr { margin-bottom: 0.2em } small { font-size: .8em } big { font-size: 1.2em } article blockquote { font-style: italic; position: relative; font-size: 1.0em; line-height: 1.5em; padding-left: 1em; border-left: 4px solid rgba(170, 170, 170, 0.5) } article blockquote cite { font-style: italic } article blockquote cite a { color: #aaa !important; word-wrap: break-word } article blockquote cite:before { content: '\2014'; padding-right: .3em; padding-left: .3em; color: #aaa } @media only screen and (min-width: 992px) { article blockquote { padding-left: 1.5em; border-left-width: 4px } } .pullquote-right:before, .pullquote-left:before { padding: 0; border: none; content: attr(data-pullquote); float: right; width: 45%; margin: .5em 0 1em 1.5em; position: relative; top: 7px; font-size: 1.4em; line-height: 1.45em } .pullquote-left:before { float: left; margin: .5em 1.5em 1em 0 } .force-wrap, article a, aside.sidebar a { white-space: -moz-pre-wrap; white-space: -pre-wrap; white-space: -o-pre-wrap; white-space: pre-wrap; word-wrap: break-word } .group, body>header, body>nav, body>footer, body #content>article, body #content>div>article, body #content>div>section, body div.pagination, aside.sidebar, #main, #content, .sidebar { *zoom: 1 } .group:after, body>header:after, body>nav:after, body>footer:after, body #content>article:after, body #content>div>section:after, body div.pagination:after, #main:after, #content:after, .sidebar:after { content: ""; display: table; clear: both } body { -webkit-text-size-adjust: none; max-width: 1200px; /* original is 1200px but for some reason it is smaller in my screen */ position: relative; margin: 0 auto; font-size: 110%; } body>header, body>nav, body>footer, body #content>article, body #content>div>article, body #content>div>section { padding-left: 18px; padding-right: 18px } @media only screen and (min-width: 480px) { body>header, body>nav, body>footer, body #content>article, body #content>div>article, body #content>div>section { padding-left: 25px; padding-right: 25px } } @media only screen and (min-width: 768px) { body>header, body>nav, body>footer, body #content>article, body #content>div>article, body #content>div>section { padding-left: 35px; padding-right: 35px } } @media only screen and (min-width: 992px) { body>header, body>nav, body>footer, body #content>article, body #content>div>article, body #content>div>section { /* changed padding for the main article from 55px to 40px*/ padding-left: 40px; padding-right: 40px; } } body div.pagination { margin-left: 18px; margin-right: 18px } @media only screen and (min-width: 480px) { body div.pagination { margin-left: 25px; margin-right: 25px } } @media only screen and (min-width: 768px) { body div.pagination { margin-left: 35px; margin-right: 35px } } @media only screen and (min-width: 992px) { 
body div.pagination { margin-left: 55px; margin-right: 55px } } body>header { font-size: 1em; padding-top: 1.5em; padding-bottom: 1.5em } #content { overflow: hidden } #content>div, #content>article { width: 100% } aside.sidebar { float: none; padding: 0 18px 1px; background-color: #f7f7f7; border-top: 1px solid #e0e0e0 } .flex-content, article img, article video, article .flash-video, article .caption-wrapper, aside.sidebar img { max-width: 100%; height: auto } .basic-alignment.left, article img.left, article video.left, article .left.flash-video, article .left.caption-wrapper, aside.sidebar img.left { float: left; margin-right: 1.5em } .basic-alignment.right, article img.right, article video.right, article .right.flash-video, article .right.caption-wrapper, aside.sidebar img.right { float: right; margin-left: 1.5em } .basic-alignment.center, article img.center, article video.center, article .center.flash-video, article .center.caption-wrapper, aside.sidebar img.center { display: block; margin: 0 auto 1.5em } .basic-alignment.left, article img.left, article video.left, article .left.flash-video, article .left.caption-wrapper, aside.sidebar img.left, .basic-alignment.right, article img.right, article video.right, article .right.flash-video, article .right.caption-wrapper, aside.sidebar img.right { margin-bottom: .8em } .toggle-sidebar, .no-sidebar .toggle-sidebar { display: none } @media only screen and (min-width: 750px) { body.sidebar-footer aside.sidebar { float: none; width: auto; clear: left; margin: 0; padding: 0 35px 1px; background-color: #f7f7f7; border-top: 1px solid #eaeaea } body.sidebar-footer aside.sidebar section.odd, body.sidebar-footer aside.sidebar section.even { float: left; width: 48% } body.sidebar-footer aside.sidebar section.odd { margin-left: 0 } body.sidebar-footer aside.sidebar section.even { margin-left: 4% } body.sidebar-footer aside.sidebar.thirds section { width: 30%; margin-left: 5% } body.sidebar-footer aside.sidebar.thirds section.first { margin-left: 0; clear: both } } body.sidebar-footer #content { margin-right: 0px } body.sidebar-footer .toggle-sidebar { display: none } @media only screen and (min-width: 550px) { body>header { font-size: 1em } } @media only screen and (min-width: 750px) { aside.sidebar { float: none; width: auto; clear: left; margin: 0; padding: 0 35px 1px; background-color: #f7f7f7; border-top: 1px solid #eaeaea } aside.sidebar section.odd, aside.sidebar section.even { float: left; width: 48% } aside.sidebar section.odd { margin-left: 0 } aside.sidebar section.even { margin-left: 4% } aside.sidebar.thirds section { width: 30%; margin-left: 5% } aside.sidebar.thirds section.first { margin-left: 0; clear: both } } @media only screen and (min-width: 768px) { body { -webkit-text-size-adjust: auto } body>header { font-size: 1.2em } #main { padding: 0; margin: 0 auto } #content { overflow: visible; margin-right: 240px; position: relative } .no-sidebar #content { margin-right: 0; border-right: 0 } .collapse-sidebar #content { margin-right: 20px } #content>div, #content>article { padding-top: 17.5px; padding-bottom: 17.5px; float: left } aside.sidebar { width: 210px; padding: 0 15px 15px; background: none; clear: none; float: left; margin: 0 -100% 0 0; } aside.sidebar section { width: auto; margin-left: 0 } aside.sidebar section.odd, aside.sidebar section.even { float: none; width: auto; margin-left: 0 } .collapse-sidebar aside.sidebar { float: none; width: auto; clear: left; margin: 0; padding: 0 35px 1px; background-color: #f7f7f7; 
border-top: 1px solid #eaeaea } .collapse-sidebar aside.sidebar section.odd, .collapse-sidebar aside.sidebar section.even { float: left; width: 48% } .collapse-sidebar aside.sidebar section.odd { margin-left: 0 } .collapse-sidebar aside.sidebar section.even { margin-left: 4% } .collapse-sidebar aside.sidebar.thirds section { width: 30%; margin-left: 5% } .collapse-sidebar aside.sidebar.thirds section.first { margin-left: 0; clear: both } } @media only screen and (min-width: 992px) { body>header { font-size: 1.3em } #content { margin-right: 320px; /* increase this by X where X is the increase in width of aside.sidebar a few line down */ } #content>div, #content>article { padding-top: 0.5em; padding-bottom: 0.5em; } aside.sidebar { width: 280px; /* increase sidebad, remember to increase the margin-right of #content a few lines up by the same amount */ padding: 0em 20px 20px } .collapse-sidebar aside.sidebar { padding-left: 55px; padding-right: 55px } } body>header { background: #333 } body>header h1 { display: inline-block; margin: 0 } body>header h1 a, body>header h1 a:visited, body>header h1 a:hover { color: #f2f2f2; text-decoration: none } body>header h2 { margin: .2em 0 0; font-size: 1em; color: #aaa; font-weight: normal } body>nav { position: relative; background-color: #ccc; background: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(0%, #e0e0e0), color-stop(50%, #cccccc), color-stop(100%, #b0b0b0)); background: -webkit-linear-gradient(#e0e0e0, #cccccc, #b0b0b0); background: -moz-linear-gradient(#e0e0e0, #cccccc, #b0b0b0); background: -o-linear-gradient(#e0e0e0, #cccccc, #b0b0b0); background: linear-gradient(#e0e0e0, #cccccc, #b0b0b0); border-top: 1px solid #f2f2f2; border-bottom: 1px solid #8c8c8c; padding-top: .35em; padding-bottom: .35em } body>nav form { -webkit-background-clip: padding; -moz-background-clip: padding; background-clip: padding-box; margin: 0; padding: 0 } body>nav form .search { padding: .3em .5em 0; font-size: .85em; font-family: "PT Sans", "Helvetica Neue", Arial, sans-serif; line-height: 1.1em; width: 95%; -webkit-border-radius: 0.5em; -moz-border-radius: 0.5em; -ms-border-radius: 0.5em; -o-border-radius: 0.5em; border-radius: 0.5em; -webkit-background-clip: padding; -moz-background-clip: padding; background-clip: padding-box; -webkit-box-shadow: #d1d1d1 0 1px; -moz-box-shadow: #d1d1d1 0 1px; box-shadow: #d1d1d1 0 1px; background-color: #f2f2f2; border: 1px solid #b3b3b3; color: #888 } body>nav form .search:focus { color: #444; border-color: #80b1df; -webkit-box-shadow: #80b1df 0 0 4px, #80b1df 0 0 3px inset; -moz-box-shadow: #80b1df 0 0 4px, #80b1df 0 0 3px inset; box-shadow: #80b1df 0 0 4px, #80b1df 0 0 3px inset; background-color: #fff; outline: none } body>nav fieldset[role=search] { float: right; width: 48% } body>nav fieldset.mobile-nav { float: left; width: 48% } body>nav fieldset.mobile-nav select { width: 100%; font-size: .8em; border: 1px solid #888 } body>nav ul { display: none } @media only screen and (min-width: 550px) { body>nav { font-size: .9em } body>nav ul { margin: 0; padding: 0; border: 0; overflow: hidden; *zoom: 1; float: left; display: block; padding-top: .15em } body>nav ul li { list-style-image: none; list-style-type: none; margin-left: 0; white-space: nowrap; display: inline; float: left; padding-left: 0; padding-right: 0 } body>nav ul li:first-child, body>nav ul li.first { padding-left: 0 } body>nav ul li:last-child { padding-right: 0 } body>nav ul li.last { padding-right: 0 } body>nav ul.subscription { margin-left: .8em; float: 
right } body>nav ul.subscription li:last-child a { padding-right: 0 } body>nav ul li { margin: 0 } body>nav a { color: #6b6b6b; font-family: "PT Sans", "Helvetica Neue", Arial, sans-serif; text-shadow: #ebebeb 0 1px; float: left; text-decoration: none; font-size: 1.1em; padding: .1em 0; line-height: 1.5em } body>nav a:visited { color: #6b6b6b } body>nav a:hover { color: #2b2b2b } body>nav li+li { border-left: 1px solid #b0b0b0; margin-left: .8em } body>nav li+li a { padding-left: .8em; border-left: 1px solid #dedede } body>nav form { float: right; text-align: left; padding-left: .8em; width: 175px } body>nav form .search { width: 93%; font-size: .95em; line-height: 1.2em } body>nav ul[data-subscription$=email]+form { width: 97px } body>nav ul[data-subscription$=email]+form .search { width: 91% } body>nav fieldset.mobile-nav { display: none } body>nav fieldset[role=search] { width: 99% } } @media only screen and (min-width: 992px) { body>nav form { width: 215px } body>nav ul[data-subscription$=email]+form { width: 147px } } .no-placeholder body>nav .search { background: #f2f2f2 0.3em 0.25em no-repeat; text-indent: 1.3em } @media only screen and (min-width: 550px) { .maskImage body>nav ul[data-subscription$=email]+form { width: 123px } } @media only screen and (min-width: 992px) { .maskImage body>nav ul[data-subscription$=email]+form { width: 173px } } .maskImage ul.subscription { position: relative; top: .2em } .maskImage ul.subscription li, .maskImage ul.subscription a { border: 0; padding: 0 } .maskImage a[rel=subscribe-rss] { position: relative; top: 0px; text-indent: -999999em; background-color: #dedede; border: 0; padding: 0 } .maskImage a[rel=subscribe-rss], .maskImage a[rel=subscribe-rss]:after { -webkit-mask-repeat: no-repeat; -moz-mask-repeat: no-repeat; -ms-mask-repeat: no-repeat; -o-mask-repeat: no-repeat; mask-repeat: no-repeat; width: 22px; height: 22px } .maskImage a[rel=subscribe-rss]:after { content: ""; position: absolute; top: -1px; left: 0; background-color: #ababab } .maskImage a[rel=subscribe-rss]:hover:after { background-color: #9e9e9e } .maskImage a[rel=subscribe-email] { position: relative; top: 0px; text-indent: -999999em; background-color: #dedede; border: 0; padding: 0 } .maskImage a[rel=subscribe-email], .maskImage a[rel=subscribe-email]:after { -webkit-mask-repeat: no-repeat; -moz-mask-repeat: no-repeat; -ms-mask-repeat: no-repeat; -o-mask-repeat: no-repeat; mask-repeat: no-repeat; width: 28px; height: 22px } .maskImage a[rel=subscribe-email]:after { content: ""; position: absolute; top: -1px; left: 0; background-color: #ababab } .maskImage a[rel=subscribe-email]:hover:after { background-color: #9e9e9e } article { padding-top: 1em } article header { position: relative; /*padding-top: 2em;*/ padding-bottom: 1em; margin-bottom: 1em; } article header h1 { margin: 0; margin-top: 0.15em; } article header h1 a { text-decoration: none } article header h1 a:hover { text-decoration: underline } article header p { font-size: .9em; color: #aaa; margin: 0 } article header p.meta { text-transform: uppercase; /*position: absolute;*/ top: 0; } @media only screen and (min-width: 768px) { article header { margin-bottom: 1.5em; padding-bottom: 1em; } } .entry-content article h2:first-child, article header+h2 { padding-top: 0 } article h2:first-child, article header+h2 { background: none } article .feature { padding-top: .5em; margin-bottom: 1em; padding-bottom: 1em; font-size: 2.0em; font-style: italic; line-height: 1.3em } article img, article video, article .flash-video, 
article .caption-wrapper { -webkit-border-radius: 0.3em; -moz-border-radius: 0.3em; -ms-border-radius: 0.3em; -o-border-radius: 0.3em; border-radius: 0.3em; -webkit-box-shadow: rgba(0, 0, 0, 0.15) 0 1px 4px; -moz-box-shadow: rgba(0, 0, 0, 0.15) 0 1px 4px; box-shadow: rgba(0, 0, 0, 0.15) 0 1px 4px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; border: #fff 0.5em solid } article img.caption, article video.caption, article .flash-video.caption, article .caption-wrapper.caption { -webkit-border-radius: 0px; -moz-border-radius: 0px; -ms-border-radius: 0px; -o-border-radius: 0px; border-radius: 0px; -webkit-box-shadow: 0px; -moz-box-shadow: 0px; box-shadow: 0px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; border: 0px } article .caption-wrapper { display: inline-block; margin-bottom: 1em; } article .caption-wrapper .caption-text { background: #fff; text-align: center; font-size: .8em; color: #666; display: block } article video, article .flash-video { margin: 0 auto 1.5em } article video { display: block; width: 100% } article .flash-video>div { position: relative; display: block; padding-bottom: 56.25%; padding-top: 1px; height: 0; overflow: hidden } article .flash-video>div iframe, article .flash-video>div object, article .flash-video>div embed { position: absolute; top: 0; left: 0; width: 100%; height: 100% } article>footer { padding-bottom: 0.3em; /* decrease footer padding */ margin-top: 0.3em; } article>footer p.meta { margin-bottom: .8em; font-size: .85em; clear: both; overflow: hidden } #content .blog-index { padding-top: 0; padding-bottom: 0 } #content .blog-index article { padding-top: 2em; font-family: "PT Serif",Georgia,Times,"Times New Roman",serif; } #content .blog-index article header { background: none; padding-bottom: 0 } #content .blog-index article h1 { font-size: 2.2em } #content .blog-index article h1 a { color: inherit } #content .blog-index article h1 a:hover { color: #0181eb } #content .blog-index a[rel=full-article] { background: #ebebeb; display: inline-block; padding: .4em .8em; margin-right: .5em; text-decoration: none; color: #666; -webkit-transition: background-color 0.5s; -moz-transition: background-color 0.5s; -o-transition: background-color 0.5s; transition: background-color 0.5s } #content .blog-index a[rel=full-article]:hover { background: #0181eb; text-shadow: none; color: #f8f8f8 } #content .blog-index footer { margin-top: 1em } .separator, article>footer .byline+time:before, article>footer time+time:before, article>footer .comments:before, article>footer .byline ~ .categories:before { content: "\2022 "; padding: 0 .4em 0 .2em; display: inline-block } /* pagination stuff */ #content div.pagination { text-align: center; font-size: .95em; position: relative; padding-top: 1.5em; padding-bottom: 1.5em } #content div.pagination a { text-decoration: none; color: #000; padding-left: 0.4em; /* horizontal space between pagination links */ } #content div.pagination a:hover { /* hover color for pagination links/icons */ color: #0181eb } /* if you want to send pagination icons/links to left or right, you can use these labels #content div.pagination a.prev { position: absolute; left: 0 } #content div.pagination a.next { position: absolute; right: 0 } #content div.pagination a[href*=archive]:before, #content div.pagination a[href*=archive]:after { content: '\2014'; padding: 0 .3em } */ p.meta+.sharing { padding-top: 1em; padding-left: 0; } #fb-root { display: none } pre { background: #002b36 top 
left; -webkit-border-radius: 0.0em; -moz-border-radius: 0.0em; -ms-border-radius: 0.0em; -o-border-radius: 0.0em; border-radius: 0.0em; line-height: 1.45em; margin-bottom: 0.8em; padding: 0em 0em; color: #93a1a1; font-size: 80%; /* change the code font size */ } h3.filename+pre { -moz-border-radius-topleft: 0px; -webkit-border-top-left-radius: 0px; border-top-left-radius: 0px; -moz-border-radius-topright: 0px; -webkit-border-top-right-radius: 0px; border-top-right-radius: 0px } p code, li code { display: inline-block; white-space: no-wrap; background: #fff; font-size: .8em; line-height: 1.5em; color: #555; border: 1px solid #ddd; -webkit-border-radius: 0.4em; -moz-border-radius: 0.4em; -ms-border-radius: 0.4em; -o-border-radius: 0.4em; border-radius: 0.4em; padding: 0 .3em; margin: -1px 0 } p pre code, li pre code { font-size: 1em !important; background: none; border: none; color: #93a1a1; } /* Fix codecaption and the new Chroma highlighter */ div.highlight > pre > code, pre > code { overflow-y: hidden; display: block; } td > div.highlight > pre > code, div.linenodiv > pre > code { overflow: scroll; overflow-y: hidden; display: inline; } /* End fix codecaption and the new Chroma highlighter */ pre::-webkit-scrollbar, .highlighttable::-webkit-scrollbar, .gist-highlighttable::-webkit-scrollbar { height: .5em; background: rgba(255, 255, 255, 0.15) } pre::-webkit-scrollbar-thumb:horizontal, .highlighttable::-webkit-scrollbar-thumb:horizontal, .gist-highlighttable::-webkit-scrollbar-thumb:horizontal { background: rgba(255, 255, 255, 0.2); -webkit-border-radius: 4px; border-radius: 4px } figure.code { background: none; padding: 0; border: 0; margin-bottom: 0.8em } figure.code pre { margin-bottom: 0 } figure.code figcaption { position: relative } figure.code .highlighttable { margin-bottom: 0; table-layout: fixed; width: 100%; } .code-title, h3.filename, figure.code figcaption { /* figcaption changes */ text-align: center; line-height: 2em; text-shadow: #cbcccc 0 1px 0; color: #474747; font-weight: normal; margin-bottom: 0; -moz-border-radius-topleft: 5px; -webkit-border-top-left-radius: 5px; border-top-left-radius: 5px; -moz-border-radius-topright: 5px; -webkit-border-top-right-radius: 5px; border-top-right-radius: 5px; background: #aaa top repeat-x; border: 1px solid #565656; border-top-color: #cbcbcb; border-left-color: #a5a5a5; border-right-color: #a5a5a5; border-bottom: 0; font-size: 90%; /* change size of font in figcaption */ } .download-source, figure.code figcaption a { position: absolute; right: .8em; text-decoration: none; color: #666 !important; z-index: 1; font-size: 13px; text-shadow: #cbcccc 0 1px 0; padding-left: 3em } .download-source:hover, figure.code figcaption a:hover { text-decoration: underline } #archive #content>div, #archive #content>div>article { padding-top: 0 } #blog-archives { color: #aaa } #blog-archives article { padding: 1em 0 1em; position: relative; } #blog-archives article:last-child { background: none } #blog-archives article footer { padding: 0; margin: 0 } #blog-archives h1 { color: #222; margin-bottom: .3em } #blog-archives h2 { display: none } #blog-archives h1 { font-size: 1.5em } #blog-archives h1 a { text-decoration: none; color: inherit; font-weight: normal; display: inline-block } #blog-archives h1 a:hover { text-decoration: underline } #blog-archives h1 a:hover { color: #0181eb } #blog-archives a.category, #blog-archives time { color: #aaa; } #blog-archives .entry-content { display: none } #blog-archives time { font-size: .9em; line-height: 1.2em } 
#blog-archives time .month, #blog-archives time .day { display: inline-block; } #blog-archives time .month { text-transform: uppercase } #blog-archives p { margin-bottom: 1em } #blog-archives a, #blog-archives .entry-content a { color: inherit } #blog-archives a:hover, #blog-archives .entry-content a:hover { color: #0181eb } #blog-archives a:hover { color: #0181eb } @media only screen and (min-width: 550px) { #blog-archives article { margin-left: 5em } #blog-archives h2 { margin-bottom: 0.3em; font-weight: normal; display: inline-block; position: relative; top: -1px; float: left; margin-top: 1.7em; /* align year with the rest */ } #blog-archives time { position: absolute; text-align: right; left: 0em; top: 3em; /* may need to change this to align the month/date on the tag/category (archive) pages with post name */ font-family: "PT Sans","Helvetica Neue",Arial,sans-serif; } /* #blog-archives .year { display: none; }*/ #blog-archives article { padding-left: 4.5em; padding-bottom: .7em; } #blog-archives a.category { line-height: 1.1em; } } #content>.category article { margin-left: 0; padding-left: 6.8em } #content>.category .year { display: inline; } .side-shadow-border, aside.sidebar section h1, aside.sidebar li { -webkit-box-shadow: #fff 0 1px; -moz-box-shadow: #fff 0 1px; box-shadow: #fff 0 1px } aside.sidebar { overflow: hidden; color: #4b4b4b; text-shadow: #fff 0 1px; } aside.sidebar section { font-size: 0.9em; line-height: 1.4em; margin-bottom: 0.5em } aside.sidebar section h1 { margin: 0.5em 0 0; padding-bottom: .2em; border-bottom: 1px solid #e0e0e0 } aside.sidebar section h1+p { padding-top: .4em } aside.sidebar img { -webkit-border-radius: 0.3em; -moz-border-radius: 0.3em; -ms-border-radius: 0.3em; -o-border-radius: 0.3em; border-radius: 0.3em; -webkit-box-shadow: rgba(0, 0, 0, 0.15) 0 1px 4px; -moz-box-shadow: rgba(0, 0, 0, 0.15) 0 1px 4px; box-shadow: rgba(0, 0, 0, 0.15) 0 1px 4px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; border: #fff 0.3em solid } aside.sidebar ul { margin-bottom: 0.5em; margin-left: 0 } aside.sidebar li { list-style: none; padding: .5em 0; margin: 0; border-bottom: 1px solid #e0e0e0 } aside.sidebar li p:last-child { margin-bottom: 0 } aside.sidebar a { color: inherit; -webkit-transition: color 0.5s; -moz-transition: color 0.5s; -o-transition: color 0.5s; transition: color 0.5s } aside.sidebar:hover a { color: #222 } aside.sidebar:hover a:hover { color: #0181eb } @media only screen and (min-width: 768px) { .toggle-sidebar { outline: none; position: absolute; right: -10px; top: 0; bottom: 0; display: inline-block; text-decoration: none; color: #cecece; width: 9px; cursor: pointer } .toggle-sidebar:hover { background: #e9e9e9; background: -webkit-gradient(linear, 0% 50%, 100% 50%, color-stop(0%, rgba(224, 224, 224, 0.5)), color-stop(100%, rgba(224, 224, 224, 0))); background: -webkit-linear-gradient(left, rgba(224, 224, 224, 0.5), rgba(224, 224, 224, 0)); background: -moz-linear-gradient(left, rgba(224, 224, 224, 0.5), rgba(224, 224, 224, 0)); background: -o-linear-gradient(left, rgba(224, 224, 224, 0.5), rgba(224, 224, 224, 0)); background: linear-gradient(left, rgba(224, 224, 224, 0.5), rgba(224, 224, 224, 0)) } .toggle-sidebar:after { position: absolute; right: -11px; top: 0; width: 20px; font-size: 1.2em; line-height: 1.1em; padding-bottom: .15em; -moz-border-radius-bottomright: 0.3em; -webkit-border-bottom-right-radius: 0.3em; border-bottom-right-radius: 0.3em; text-align: center; border-bottom: 1px solid #e0e0e0; 
border-right: 1px solid #e0e0e0; content: "\00BB"; text-indent: -1px } .collapse-sidebar .toggle-sidebar { text-indent: 0px; right: -20px; width: 19px } .collapse-sidebar .toggle-sidebar:hover { background: #e9e9e9 } .collapse-sidebar .toggle-sidebar:after { border-left: 1px solid #e0e0e0; text-shadow: #fff 0 1px; content: "\00AB"; left: 0px; right: 0; text-align: center; text-indent: 0; border: 0; border-right-width: 0; background: none } } body>footer { font-size: 1em; /* increase font size for footer */ color: #888; text-shadow: #d9d9d9 0 1px; background-color: #ccc; background: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(0%, #e0e0e0), color-stop(50%, #cccccc), color-stop(100%, #b0b0b0)); background: -webkit-linear-gradient(#e0e0e0, #cccccc, #b0b0b0); background: -moz-linear-gradient(#e0e0e0, #cccccc, #b0b0b0); background: -o-linear-gradient(#e0e0e0, #cccccc, #b0b0b0); background: linear-gradient(#e0e0e0, #cccccc, #b0b0b0); border-top: 1px solid #f2f2f2; position: relative; padding-top: 1em; padding-bottom: 1em; margin-bottom: 3em; -moz-border-radius-bottomleft: 0.4em; -webkit-border-bottom-left-radius: 0.4em; border-bottom-left-radius: 0.4em; -moz-border-radius-bottomright: 0.4em; -webkit-border-bottom-right-radius: 0.4em; border-bottom-right-radius: 0.4em; z-index: 1 } body>footer a { color: #6b6b6b } body>footer a:visited { color: #6b6b6b } body>footer a:hover { color: #484848 } body>footer p:last-child { margin-bottom: 0 } table, th, td { border: 1px solid black; padding: 3px; } th { font-weight: bold; text-align: center; } /* Taken from hyde-x.css to format labels (categories) as Octopress doesn't have */ .label { display: inline; padding: .2em .6em .3em; font-size: 80%; font-weight: bold; line-height: 1; color: #fff; text-align: center; vertical-align: baseline; border-radius: .25em; } a.label:hover, a.label:focus { color: #fff; text-decoration: none; cursor: pointer; } .label:empty { display: none; } /* keep the color of visited categories- visited color makes them unreadable */ a.label:visited { color: #fff; } .label { margin: 0 .25em; background-color: #313131; } .posts a.label, .post-date a.label { color: #fff; text-decoration: none; cursor: pointer; } /* this part makes the mycode shortcode work */ .linenodiv { background-color: #002B36!important; } /* because we have changed the font-family of everything, we need to change it for the code inside pre to fit with line numbers */ pre span { font-family: Menlo,Monaco,"Andale Mono","lucida console","Courier New",monospace; } td.code { width:95%; } /* end of making mycode shortcode work */ /* wrapped the whole output from the highlight function (which is a table) in a div to get a horizontal scrollbar for the whole row and not just the "pre" in the cell having a scrollbar for the cell will break alignment with line numbers */ div.codewrapper { overflow-x: auto; overflow-y: hidden; background-color: #002B36; } /* overflow for backtick code block by just modifying div.highlight I will screw the codecaption overflow as they have div.highlight too. This ">" syntax is something new that I learned. 
basically it will select only div.highlights that are direct childs of entry-content */ div.entry-content > div.highlight { border-color: #002B36; overflow-x: auto; overflow-y: hidden; margin-bottom: 0.4em; } /* decrease h1 size in article body so we can use it in the article as heading */ div.entry-content > h1 { font-size:1.7em; } div.entry-content > h2 { font-size:1.5em; } /* decrease margin for article headings */ div.entry-content > h1, h2, h3, h4, h5, h6 { margin-bottom: 0.5em; margin-top: 0.8em; } /* side bar code from hyde-x - start */ /* * Sidebar * * Flexible banner for housing site name, intro, and "footer" content. Starts * out above content in mobile and later moves to the side with wider viewports. */ .sidebar { /* text-align: center; */ padding: 2rem 1rem; color: rgba(255,255,255,.5); background-color: #202020; } /* aligns the side bar to the left, we want it to be on the right @media (min-width: 48em) { .sidebar { position: fixed; top: 0; left: 0; bottom: 0; width: 18rem; text-align: left; } } */ /* Sidebar links */ /* Sticky sidebar * * Add the `sidebar-sticky` class to the sidebar's container to affix it the * contents to the bottom of the sidebar in tablets and up. */ @media (min-width: 48em) { .sidebar-sticky { position: absolute; right: 1rem; bottom: 1rem; left: 1rem; } } /* side bar code from hyde-x - end */ /* making the gist work - start */ .gist .gist-meta { font-size: 90% !important; color: #586069 !important; background-color: #f7f7f7; border-radius: 0 0 2px 2px; border: 1px solid #a5a5a5 !important; background: #aaa top repeat-x !important; -moz-border-radius-bottomleft: 5px !important; -webkit-border-bottom-left-radius: 5px !important; border-bottom-left-radius: 5px !important; -moz-border-radius-bottomright: 5px !important; -webkit-border-bottom-right-radius: 5px !important; border-bottom-right-radius: 5px !important; } /* Force gist linenos to be the same size as code linenos */ .gist .blob-num { font-size: 0.938em !important; } .gist .highlight, .gist .blob-code-inner { font-size: 0.938em !important; /* oh lol what have I done? 
*/ font-family: Menlo,Monaco,"Andale Mono","lucida console","Courier New",monospace !important; line-height: 1.25em !important; } /* credit: https://gist.github.com/RomkeVdMeulen/889d44d3d4c5a11002c57ca068d295ec */ .gist .pl-c1, .gist .pl-s .pl-v { color: #719e07 !important; } .gist .gist-meta { background-color: #073642 !important; color: #93a1a1 !important; } .gist .gist-meta a { color: #268bd2 !important; } .gist .gist-data, .gist .highlight { background-color: #002b36 !important; color: #93a1a1 !important; } .gist .pl-smi, .gist .pl-s .pl-s1, .gist .blob-code-inner, .gist .blob-num { color: #93a1a1 !important; } .gist .pl-k { color: #719e07 !important; } .gist .pl-e, .gist .pl-en { color: #cb4b16 !important; } .gist .pl-s, .gist .pl-pds, .gist .pl-s .pl-pse .pl-s1, .gist .pl-sr, .gist .pl-sr .pl-cce, .gist .pl-sr .pl-sre, .gist .pl-sr .pl-sra { color: #2aa198 !important; } .gist .pl-s, .gist .pl-pds, .gist .pl-s .pl-pse .pl-s1, .gist .pl-sr, .gist .pl-sr .pl-cce, .gist .pl-sr .pl-sre, .gist .pl-sr .pl-sra, .gist .pl-e, .gist .pl-en, .gist .pl-k, .gist .pl-smi, .gist .pl-s .pl-s1, .gist .blob-code-inner, .gist .blob-num, .gist .gist-data, .gist .highlight, .gist .pl-c1, .gist .pl-s .pl-v, .gist .pl-c /* comments */ { font-family: Menlo,Monaco,"Andale Mono","lucida console","Courier New",monospace !important; } /* credit gist - end*/ /* making the gist work - end */ /* Hugo Figure shortcode - start */ figure figcaption { background: #fff; text-align: center; font-size: 1em; color: #666; display: block; } figure { color: #666; } figure a { color: #666; text-align: center; text-decoration: none; display: inline-block; white-space: nowrap; } /* Hugo Figure shortcode - end */
Nurmukhamed/www-hdfilm-kz-octopress
themes/Hugo-Octopress/static/css/hugo-octopress.css
CSS
unlicense
40,401
import fechbase class Records(fechbase.RecordsBase): def __init__(self): fechbase.RecordsBase.__init__(self) self.fields = [ {'name': 'FORM TYPE', 'number': '1'}, {'name': 'FILER COMMITTEE ID NUMBER', 'number': '2'}, {'name': 'ENTITY TYPE', 'number': '3'}, {'name': 'ORGANIZATION NAME', 'number': '4'}, {'name': 'INDIVIDUAL LAST NAME', 'number': '5'}, {'name': 'INDIVIDUAL FIRST NAME', 'number': '6'}, {'name': 'INDIVIDUAL MIDDLE NAME', 'number': '7'}, {'name': 'INDIVIDUAL PREFIX', 'number': '8'}, {'name': 'INDIVIDUAL SUFFIX', 'number': '9'}, {'name': 'CHANGE OF ADDRESS', 'number': '10'}, {'name': 'STREET 1', 'number': '11'}, {'name': 'STREET 2', 'number': '12'}, {'name': 'CITY', 'number': '13'}, {'name': 'STATE', 'number': '14'}, {'name': 'ZIP', 'number': '15'}, {'name': 'INDIVIDUAL EMPLOYER', 'number': '16'}, {'name': 'INDIVIDUAL OCCUPATION', 'number': '17'}, {'name': 'COVERAGE FROM DATE', 'number': '18'}, {'name': 'COVERAGE THROUGH DATE', 'number': '19'}, {'name': 'DATE OF PUBLIC DISTRIBUTION', 'number': '20'}, {'name': 'COMMUNICATION TITLE', 'number': '21'}, {'name': 'FILER CODE', 'number': '22'}, {'name': 'FILER CODE DESCRIPTION', 'number': '23'}, {'name': 'SEGREGATED BANK ACCOUNT', 'number': '24'}, {'name': 'CUSTODIAN LAST NAME', 'number': '25'}, {'name': 'CUSTODIAN FIRST NAME', 'number': '26'}, {'name': 'CUSTODIAN MIDDLE NAME', 'number': '27'}, {'name': 'CUSTODIAN PREFIX', 'number': '28'}, {'name': 'CUSTODIAN SUFFIX', 'number': '29'}, {'name': 'CUSTODIAN STREET 1', 'number': '30'}, {'name': 'CUSTODIAN STREET 2', 'number': '31'}, {'name': 'CUSTODIAN CITY', 'number': '32'}, {'name': 'CUSTODIAN STATE', 'number': '33'}, {'name': 'CUSTODIAN ZIP', 'number': '34'}, {'name': 'CUSTODIAN EMPLOYER', 'number': '35'}, {'name': 'CUSTODIAN OCCUPATION', 'number': '36'}, {'name': 'TOTAL DONATIONS THIS STATEMENT', 'number': '37-9.'}, {'name': 'TOTAL DISB./OBLIG. THIS STATEMENT', 'number': '38-10.'}, {'name': 'PERSON COMPLETING LAST NAME', 'number': '39'}, {'name': 'PERSON COMPLETING FIRST NAME', 'number': '40'}, {'name': 'PERSON COMPLETING MIDDLE NAME', 'number': '41'}, {'name': 'PERSON COMPLETING PREFIX', 'number': '42'}, {'name': 'PERSON COMPLETING SUFFIX', 'number': '43'}, {'name': 'DATE SIGNED', 'number': '44'}, ] self.fields_names = self.hash_names(self.fields)
h4ck3rm1k3/FEC-Field-Documentation
fec/version/v8_0/F9.py
Python
unlicense
2,852
/* * Copyright 2014 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.collector.receiver.udp; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.util.ArrayList; import java.util.List; import org.apache.thrift.TBase; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import com.navercorp.pinpoint.collector.receiver.AbstractDispatchHandler; import com.navercorp.pinpoint.collector.receiver.DataReceiver; import com.navercorp.pinpoint.common.Version; import com.navercorp.pinpoint.common.trace.ServiceType; import com.navercorp.pinpoint.profiler.AgentInformation; import com.navercorp.pinpoint.profiler.context.Span; import com.navercorp.pinpoint.profiler.context.SpanChunk; import com.navercorp.pinpoint.profiler.context.SpanChunkFactory; import com.navercorp.pinpoint.profiler.context.SpanEvent; import com.navercorp.pinpoint.profiler.sender.SpanStreamUdpSender; import com.navercorp.pinpoint.thrift.dto.TResult; import com.navercorp.pinpoint.thrift.dto.TSpan; import com.navercorp.pinpoint.thrift.dto.TSpanChunk; import com.navercorp.pinpoint.thrift.dto.TSpanEvent; /** * @author emeroad */ public class SpanStreamUDPSenderTest { private static MessageHolderDispatchHandler messageHolder; private static DataReceiver receiver = null; private static int port; @BeforeClass public static void setUp() throws IOException { port = getAvaiableUDPPort(21111); try { messageHolder = new MessageHolderDispatchHandler(); receiver = new TestUDPReceiver("test", new SpanStreamUDPPacketHandlerFactory<DatagramPacket>(messageHolder, new TestTBaseFilter()), "127.0.0.1", port, 1024, 1, 10); receiver.start(); } catch (Exception e) { } } @AfterClass public static void tearDown() { if (receiver != null) { receiver.shutdown(); } } private static int getAvaiableUDPPort(int defaultPort) throws IOException { int bindPort = defaultPort; DatagramSocket dagagramSocket = null; while (0xFFFF >= bindPort && dagagramSocket == null) { try { dagagramSocket = new DatagramSocket(bindPort); } catch (IOException ex) { bindPort++; } } if (dagagramSocket != null) { dagagramSocket.close(); return bindPort; } throw new IOException("can't find available port."); } @Test public void sendTest1() throws InterruptedException { SpanStreamUdpSender sender = null; try { sender = new SpanStreamUdpSender("127.0.0.1", port, "threadName", 10); sender.send(createSpanChunk(10)); sender.send(createSpanChunk(3)); Thread.sleep(6000); List<TBase> tBaseList = messageHolder.getMessageHolder(); int spanChunkCount = getObjectCount(tBaseList, TSpanChunk.class); Assert.assertEquals(2, spanChunkCount); tBaseList.clear(); } finally { if (sender != null) { sender.stop(); } } } @Test public void sendTest2() throws InterruptedException { SpanStreamUdpSender sender = null; try { sender = new SpanStreamUdpSender("127.0.0.1", port, "threadName", 10); sender.send(createSpan(10)); sender.send(createSpan(3)); Thread.sleep(6000); List<TBase> tBaseList 
= messageHolder.getMessageHolder(); int spanCount = getObjectCount(tBaseList, TSpan.class); Assert.assertEquals(2, spanCount); tBaseList.clear(); } finally { if (sender != null) { sender.stop(); } } } @Test public void sendTest3() throws InterruptedException { SpanStreamUdpSender sender = null; try { sender = new SpanStreamUdpSender("127.0.0.1", port, "threadName", 10); sender.send(createSpan(10)); sender.send(createSpan(3)); sender.send(createSpanChunk(3)); Thread.sleep(6000); List<TBase> tBaseList = messageHolder.getMessageHolder(); int spanCount = getObjectCount(tBaseList, TSpan.class); int spanChunkCount = getObjectCount(tBaseList, TSpanChunk.class); Assert.assertEquals(2, spanCount); Assert.assertEquals(1, spanChunkCount); tBaseList.clear(); } finally { if (sender != null) { sender.stop(); } } } private Span createSpan(int spanEventSize) throws InterruptedException { AgentInformation agentInformation = new AgentInformation("agentId", "applicationName", 0, 0, "machineName", "127.0.0.1", ServiceType.STAND_ALONE, Version.VERSION); SpanChunkFactory spanChunkFactory = new SpanChunkFactory(agentInformation); List<SpanEvent> spanEventList = createSpanEventList(spanEventSize); Span span = new Span(); List<TSpanEvent> tSpanEventList = new ArrayList<TSpanEvent>(); for (SpanEvent spanEvent : spanEventList) { tSpanEventList.add(spanEvent); } span.setSpanEventList(tSpanEventList); return span; } private SpanChunk createSpanChunk(int spanEventSize) throws InterruptedException { AgentInformation agentInformation = new AgentInformation("agentId", "applicationName", 0, 0, "machineName", "127.0.0.1", ServiceType.STAND_ALONE, Version.VERSION); SpanChunkFactory spanChunkFactory = new SpanChunkFactory(agentInformation); List<SpanEvent> originalSpanEventList = createSpanEventList(spanEventSize); SpanChunk spanChunk = spanChunkFactory.create(originalSpanEventList); return spanChunk; } private int getObjectCount(List<TBase> tbaseList, Class clazz) { int count = 0; for (TBase t : tbaseList) { if (clazz.isInstance(t)) { count++; } } return count; } private List<SpanEvent> createSpanEventList(int size) throws InterruptedException { // Span span = new SpanBo(new TSpan()); Span span = new Span(); List<SpanEvent> spanEventList = new ArrayList<SpanEvent>(size); for (int i = 0; i < size; i++) { SpanEvent spanEvent = new SpanEvent(span); spanEvent.markStartTime(); Thread.sleep(1); spanEvent.markAfterTime(); spanEventList.add(spanEvent); } return spanEventList; } static class TestTBaseFilter<T> implements TBaseFilter<T> { @Override public boolean filter(TBase<?, ?> tBase, T remoteHostAddress) { System.out.println("filter"); return false; } } static class MessageHolderDispatchHandler extends AbstractDispatchHandler { private List<TBase> messageHolder = new ArrayList<TBase>(); @Override public void dispatchSendMessage(TBase<?, ?> tBase) { System.out.println("dispatchSendMessage"); } @Override public TBase dispatchRequestMessage(TBase<?, ?> tBase) { messageHolder.add(tBase); return new TResult(true); } public List<TBase> getMessageHolder() { return messageHolder; } } }
shuvigoss/pinpoint
collector/src/test/java/com/navercorp/pinpoint/collector/receiver/udp/SpanStreamUDPSenderTest.java
Java
apache-2.0
8,198
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildRuleType; import com.facebook.buck.rules.BuildTargetSourcePath; import com.facebook.buck.rules.InstallableApk; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.shell.AbstractGenruleDescription; import com.facebook.buck.util.HumanReadableException; import com.facebook.infer.annotation.SuppressFieldNotInitialized; import com.google.common.base.Optional; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSortedSet; public class ApkGenruleDescription extends AbstractGenruleDescription<ApkGenruleDescription.Arg> { public static final BuildRuleType TYPE = BuildRuleType.of("apk_genrule"); @Override public BuildRuleType getBuildRuleType() { return TYPE; } @Override public Arg createUnpopulatedConstructorArg() { return new Arg(); } @Override protected <A extends ApkGenruleDescription.Arg> BuildRule createBuildRule( BuildRuleParams params, BuildRuleResolver resolver, A args, ImmutableList<SourcePath> srcs, Optional<com.facebook.buck.rules.args.Arg> cmd, Optional<com.facebook.buck.rules.args.Arg> bash, Optional<com.facebook.buck.rules.args.Arg> cmdExe, String out) { final BuildRule installableApk = resolver.getRule(args.apk); if (!(installableApk instanceof InstallableApk)) { throw new HumanReadableException("The 'apk' argument of %s, %s, must correspond to an " + "installable rule, such as android_binary() or apk_genrule().", params.getBuildTarget(), args.apk.getFullyQualifiedName()); } final Supplier<ImmutableSortedSet<BuildRule>> originalExtraDeps = params.getExtraDeps(); return new ApkGenrule( params.copyWithExtraDeps( Suppliers.memoize( new Supplier<ImmutableSortedSet<BuildRule>>() { @Override public ImmutableSortedSet<BuildRule> get() { return ImmutableSortedSet.<BuildRule>naturalOrder() .addAll(originalExtraDeps.get()) .add(installableApk) .build(); } })), new SourcePathResolver(resolver), srcs, cmd, bash, cmdExe, new BuildTargetSourcePath(args.apk), args.tests.get()); } @SuppressFieldNotInitialized public static class Arg extends AbstractGenruleDescription.Arg { public BuildTarget apk; } }
Dominator008/buck
src/com/facebook/buck/android/ApkGenruleDescription.java
Java
apache-2.0
3,481
/* Derby - Class org.apache.derby.authentication.SystemPrincipal Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.authentication; import java.io.Serializable; import java.security.Principal; /** * This class represents Derby's notion of a principal, a concept of * user identity with controlled access to Derby System Privileges. * An authenticated user may have other identities which make sense in * other code domains. * <p> * Note that principal names do NOT follow Authorization Identifier rules. * For instance, although edward and edWard both match the normalized * authorization identifier EDWARD, the instances * <code>SystemPrincipal("edward")</code> and * <code>SystemPrincipal("edWard")</code> represent different principals * under the methods <code>getName()</code>, <code>equals()</code>, and * <code>hashCode()</code>. * <p> * According to JAASRefGuide, Principal classes must implement Serializable. * * @see Principal#getName * @see <a href="http://java.sun.com/javase/6/docs/technotes/guides/security/jaas/JAASRefGuide.html#Principals">JAASRefGuide on Principals</a> */ final public class SystemPrincipal implements Principal, Serializable { /** * BTW, this class currently does not require special handling during * serialization/deserialization, so, there's no need to define methods * <code>readObject(ObjectInputStream)</code> and * <code>writeObject(ObjectOutputStream)</code>. */ static final long serialVersionUID = 925380094921530190L; /** * The name of the principal. * <p> * Note that the name is not a "normalized" Authorization Identifier. * This is due to peculiarities of the Java Security Runtime, which * compares a <code>javax.security.auth.Subject</code>'s Principals * against the literal Principal name as declared in the policy files, * and not against the return value of method <code>getName()</code>. * So, a normalization of names within SystemPrincipal doesn't affect * permission checking by the SecurityManager. * <p> * In order for a <code>javax.security.auth.Subject</code> to be * granted permissions on the basis Authorization Identifier rules, e.g., * for a Subject authenticated as edWard to fall under a policy clause * declared for EDWARD, the Subject has to be constructed (or augmented) * with both the literal name and the normalized Authorization Identifier. * <p> * As an alternative approach, class <code>SystemPrincipal</code> could * implement the non-standard interface * <code>com.sun.security.auth.PrincipalComparator</code>, which declares * a method <code>implies(Subject)<code> that would allow for Principals * to match Subjects on the basis of normalized Authorization Identifiers. * But then we'd be relying upon non-standard Security Runtime behaviour. 
* * @see <a href="http://wiki.apache.org/db-derby/UserIdentifiers">User Names & Authorization Identifiers in Derby</a> */ private final String name; /** * Constructs a principal for a given name. * * @param name the name of the principal * @throws NullPointerException if name is null * @throws IllegalArgumentException if name is not a legal Principal name */ public SystemPrincipal(String name) { // RuntimeException messages not localized if (name == null) { throw new NullPointerException("name can't be null"); } if (name.length() == 0) { throw new IllegalArgumentException("name can't be empty"); } this.name = name; } /** * Compares this principal to the specified object. Returns true if * the object passed in matches the principal represented by the * implementation of this interface. * * @param other principal to compare with * @return true if the principal passed in is the same as that * encapsulated by this principal, and false otherwise * @see Principal#equals */ public boolean equals(Object other) { if (other == null) { return false; } if (!(other instanceof SystemPrincipal)) { return false; } final SystemPrincipal that = (SystemPrincipal)other; return name.equals(that.name); } /** * Returns the name of this principal. * * @return the name of this principal * @see Principal#getName() */ public String getName() { return name; } /** * Returns a hashcode for this principal. * * @return a hashcode for this principal * @see Principal#hashCode() */ public int hashCode() { return name.hashCode(); } /** * Returns a string representation of this principal. * * @return a string representation of this principal * @see Principal#toString() */ public String toString() { return getClass().getName() + "(" + name + ")"; } }
kavin256/Derby
java/engine/org/apache/derby/authentication/SystemPrincipal.java
Java
apache-2.0
5,828
#
# Cookbook Name:: lita
# Libraries:: typecast
#

# LitaHelpers
module LitaHelpers
  # Try hard to find a correct type
  def typecast(obj)
    # This could definitely be normalized down to a simpler flow, but I keep
    # thinking I'm going to find some exception that needs the granular detail
    if obj.is_a?(Hash) || obj.is_a?(Array)
      # hashes and arrays will be inspected to display properly
      obj.inspect
    elsif obj.is_a?(Fixnum) || obj.is_a?(Float) || obj.is_a?(TrueClass) || obj.is_a?(FalseClass)
      # these are just plain (that is obj.to_s does what we want)
      obj
    elsif obj.is_a?(Symbol)
      # symbols should remain symbols
      ":#{obj}"
    else
      # must be a string so let's try to convert to a ruby type
      case obj
      when /^:/
        # found symbol
        obj
      when /^\{/
        # found hash
        obj
      when /^\[/
        # found array
        obj
      when /^true$|^false$/
        # found boolean
        obj
      when /^[0-9]+\.[0-9]+$/
        # found Float
        obj
      when /^[0-9]+$/
        # found Fixnum
        obj
      else
        # leave it as a string
        "\"#{obj}\""
      end
    end
  end
end
onetwotrip/chef-lita
libraries/helpers.rb
Ruby
apache-2.0
1,210
import Ember from 'ember';
import { sumBy } from 'lodash';

const { Controller, computed } = Ember;

export default Controller.extend({
  ticketsTotal: computed(function() {
    return sumBy(this.get('model'), 'tickets');
  }),

  salesTotal: computed(function() {
    return sumBy(this.get('model'), 'sales');
  }),

  discountsTotal: computed(function() {
    return sumBy(this.get('model'), 'discounts');
  })
});
sumedh123/open-event-frontend
app/controllers/admin/sales/marketer.js
JavaScript
apache-2.0
415
({
	L_MENU_GRID: "M\u0159\u00ed\u017eka nab\u00eddky",
	L_MENU_ITEM_DISABLED: "Polo\u017eka %1 nen\u00ed dostupn\u00e1.",
	L_MENU_ITEM_SUBMENU: "Podnab\u00eddka %1",
	L_MENU_SUBMENU: "podnab\u00eddka",
	L_MENU_CHECK: "za\u0161krtnout"
})
paulswithers/XPagesExtensionLibrary
extlib/lwp/product/runtime/eclipse/plugins/com.ibm.xsp.extlib.domino/resources/web/dwa/common/nls/cs/menu.js
JavaScript
apache-2.0
233
#subject-result-tables .subject-heading {
  color: #444;
  font-weight: bold;
  margin: 30px 0 5px 0;
}

.subject-results-th {
  background-color: #E7EEFF;
  border-bottom: 1px solid #C4D2FE;
  border-top: 1px solid #C4D2FE;
  color: #444;
  font-size: 0.8em;
  height: 1.5em;
  text-transform: uppercase;
  font-weight: bold;
  padding: 0.5em 0 0 0;
}

.subject-results-tr {
  background-color: #E7EEFF;
  border-bottom: 1px solid #C4D2FE;
  font-size: 0.9em;
  height: 1.5em;
  padding: 0.5em 0 0 0;
}

.subject-results-tf {
  background-color: #E7EEFF;
  border-bottom: 1px solid #C4D2FE;
  border-top: 2px solid #C4D2FE;
  font-size: 0.9em;
  height: 1.5em;
  padding: 0.5em 0 0 0;
}

.subject-results-exam {
  float: right;
  padding: 0 30px 0 0;
  width: 180px;
}

.subject-results-marks {
  float: right;
  width: 80px;
}

.subject-results-maxmarks {
  float: right;
  width: 80px;
}

.subject-results-grade {
  float: right;
  width: 80px;
}

.subject-results-percentage {
  float: right;
}
prafula/FedenaT
public/stylesheets/rtl/student/subject_wise_report.css
CSS
apache-2.0
1,103
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). // // Copyright (c) 2011 The LevelDB Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. See the AUTHORS file for names of contributors. #include <functional> #include <string> #include <utility> #include <vector> #include "db/column_family.h" #include "db/db_iter.h" #include "db/db_test_util.h" #include "db/dbformat.h" #include "db/write_batch_internal.h" #include "port/port.h" #include "port/stack_trace.h" #include "util/string_util.h" #include "utilities/merge_operators.h" namespace rocksdb { // kTypeBlobIndex is a value type used by BlobDB only. The base rocksdb // should accept the value type on write, and report not supported value // for reads, unless caller request for it explicitly. The base rocksdb // doesn't understand format of actual blob index (the value). class DBBlobIndexTest : public DBTestBase { public: enum Tier { kMemtable = 0, kImmutableMemtables = 1, kL0SstFile = 2, kLnSstFile = 3, }; const std::vector<Tier> kAllTiers = {Tier::kMemtable, Tier::kImmutableMemtables, Tier::kL0SstFile, Tier::kLnSstFile}; DBBlobIndexTest() : DBTestBase("/db_blob_index_test") {} ColumnFamilyHandle* cfh() { return dbfull()->DefaultColumnFamily(); } ColumnFamilyData* cfd() { return reinterpret_cast<ColumnFamilyHandleImpl*>(cfh())->cfd(); } Status PutBlobIndex(WriteBatch* batch, const Slice& key, const Slice& blob_index) { return WriteBatchInternal::PutBlobIndex(batch, cfd()->GetID(), key, blob_index); } Status Write(WriteBatch* batch) { return dbfull()->Write(WriteOptions(), batch); } std::string GetImpl(const Slice& key, bool* is_blob_index = nullptr, const Snapshot* snapshot = nullptr) { ReadOptions read_options; read_options.snapshot = snapshot; PinnableSlice value; auto s = dbfull()->GetImpl(read_options, cfh(), key, &value, nullptr /*value_found*/, is_blob_index); if (s.IsNotFound()) { return "NOT_FOUND"; } if (s.IsNotSupported()) { return "NOT_SUPPORTED"; } if (!s.ok()) { return s.ToString(); } return value.ToString(); } std::string GetBlobIndex(const Slice& key, const Snapshot* snapshot = nullptr) { bool is_blob_index = false; std::string value = GetImpl(key, &is_blob_index, snapshot); if (!is_blob_index) { return "NOT_BLOB"; } return value; } ArenaWrappedDBIter* GetBlobIterator() { return dbfull()->NewIteratorImpl(ReadOptions(), cfd(), dbfull()->GetLatestSequenceNumber(), true /*allow_blob*/); } Options GetTestOptions() { Options options; options.create_if_missing = true; options.num_levels = 2; options.disable_auto_compactions = true; // Disable auto flushes. 
options.max_write_buffer_number = 10; options.min_write_buffer_number_to_merge = 10; options.merge_operator = MergeOperators::CreateStringAppendOperator(); return options; } void MoveDataTo(Tier tier) { switch (tier) { case Tier::kMemtable: break; case Tier::kImmutableMemtables: ASSERT_OK(dbfull()->TEST_SwitchMemtable()); break; case Tier::kL0SstFile: ASSERT_OK(Flush()); break; case Tier::kLnSstFile: ASSERT_OK(Flush()); ASSERT_OK(Put("a", "dummy")); ASSERT_OK(Put("z", "dummy")); ASSERT_OK(Flush()); ASSERT_OK( dbfull()->CompactRange(CompactRangeOptions(), nullptr, nullptr)); #ifndef ROCKSDB_LITE ASSERT_EQ("0,1", FilesPerLevel()); #endif // !ROCKSDB_LITE break; } } }; // Should be able to write kTypeBlobIndex to memtables and SST files. TEST_F(DBBlobIndexTest, Write) { for (auto tier : kAllTiers) { DestroyAndReopen(GetTestOptions()); for (int i = 1; i <= 5; i++) { std::string index = ToString(i); WriteBatch batch; ASSERT_OK(PutBlobIndex(&batch, "key" + index, "blob" + index)); ASSERT_OK(Write(&batch)); } MoveDataTo(tier); for (int i = 1; i <= 5; i++) { std::string index = ToString(i); ASSERT_EQ("blob" + index, GetBlobIndex("key" + index)); } } } // Get should be able to return blob index if is_blob_index is provided, // otherwise return Status::NotSupported status. TEST_F(DBBlobIndexTest, Get) { for (auto tier : kAllTiers) { DestroyAndReopen(GetTestOptions()); WriteBatch batch; ASSERT_OK(batch.Put("key", "value")); ASSERT_OK(PutBlobIndex(&batch, "blob_key", "blob_index")); ASSERT_OK(Write(&batch)); MoveDataTo(tier); // Verify normal value bool is_blob_index = false; PinnableSlice value; ASSERT_EQ("value", Get("key")); ASSERT_EQ("value", GetImpl("key")); ASSERT_EQ("value", GetImpl("key", &is_blob_index)); ASSERT_FALSE(is_blob_index); // Verify blob index ASSERT_TRUE(Get("blob_key", &value).IsNotSupported()); ASSERT_EQ("NOT_SUPPORTED", GetImpl("blob_key")); ASSERT_EQ("blob_index", GetImpl("blob_key", &is_blob_index)); ASSERT_TRUE(is_blob_index); } } // Get should NOT return Status::NotSupported if blob index is updated with // a normal value. TEST_F(DBBlobIndexTest, Updated) { for (auto tier : kAllTiers) { DestroyAndReopen(GetTestOptions()); WriteBatch batch; for (int i = 0; i < 10; i++) { ASSERT_OK(PutBlobIndex(&batch, "key" + ToString(i), "blob_index")); } ASSERT_OK(Write(&batch)); // Avoid blob values from being purged. const Snapshot* snapshot = dbfull()->GetSnapshot(); ASSERT_OK(Put("key1", "new_value")); ASSERT_OK(Merge("key2", "a")); ASSERT_OK(Merge("key2", "b")); ASSERT_OK(Merge("key2", "c")); ASSERT_OK(Delete("key3")); ASSERT_OK(SingleDelete("key4")); ASSERT_OK(Delete("key5")); ASSERT_OK(Merge("key5", "a")); ASSERT_OK(Merge("key5", "b")); ASSERT_OK(Merge("key5", "c")); ASSERT_OK(dbfull()->DeleteRange(WriteOptions(), cfh(), "key6", "key9")); MoveDataTo(tier); for (int i = 0; i < 10; i++) { ASSERT_EQ("blob_index", GetBlobIndex("key" + ToString(i), snapshot)); } ASSERT_EQ("new_value", Get("key1")); ASSERT_EQ("NOT_SUPPORTED", GetImpl("key2")); ASSERT_EQ("NOT_FOUND", Get("key3")); ASSERT_EQ("NOT_FOUND", Get("key4")); ASSERT_EQ("a,b,c", GetImpl("key5")); for (int i = 6; i < 9; i++) { ASSERT_EQ("NOT_FOUND", Get("key" + ToString(i))); } ASSERT_EQ("blob_index", GetBlobIndex("key9")); dbfull()->ReleaseSnapshot(snapshot); } } // Iterator should get blob value if allow_blob flag is set, // otherwise return Status::NotSupported status. 
TEST_F(DBBlobIndexTest, Iterate) { const std::vector<std::vector<ValueType>> data = { /*00*/ {kTypeValue}, /*01*/ {kTypeBlobIndex}, /*02*/ {kTypeValue}, /*03*/ {kTypeBlobIndex, kTypeValue}, /*04*/ {kTypeValue}, /*05*/ {kTypeValue, kTypeBlobIndex}, /*06*/ {kTypeValue}, /*07*/ {kTypeDeletion, kTypeBlobIndex}, /*08*/ {kTypeValue}, /*09*/ {kTypeSingleDeletion, kTypeBlobIndex}, /*10*/ {kTypeValue}, /*11*/ {kTypeMerge, kTypeMerge, kTypeMerge, kTypeBlobIndex}, /*12*/ {kTypeValue}, /*13*/ {kTypeMerge, kTypeMerge, kTypeMerge, kTypeDeletion, kTypeBlobIndex}, /*14*/ {kTypeValue}, /*15*/ {kTypeBlobIndex}, /*16*/ {kTypeValue}, }; auto get_key = [](int index) { char buf[20]; snprintf(buf, sizeof(buf), "%02d", index); return "key" + std::string(buf); }; auto get_value = [&](int index, int version) { return get_key(index) + "_value" + ToString(version); }; auto check_iterator = [&](Iterator* iterator, Status::Code expected_status, const Slice& expected_value) { ASSERT_EQ(expected_status, iterator->status().code()); if (expected_status == Status::kOk) { ASSERT_TRUE(iterator->Valid()); ASSERT_EQ(expected_value, iterator->value()); } else { ASSERT_FALSE(iterator->Valid()); } }; auto create_normal_iterator = [&]() -> Iterator* { return dbfull()->NewIterator(ReadOptions()); }; auto create_blob_iterator = [&]() -> Iterator* { return GetBlobIterator(); }; auto check_is_blob = [&](bool is_blob) { return [is_blob](Iterator* iterator) { ASSERT_EQ(is_blob, reinterpret_cast<ArenaWrappedDBIter*>(iterator)->IsBlob()); }; }; auto verify = [&](int index, Status::Code expected_status, const Slice& forward_value, const Slice& backward_value, std::function<Iterator*()> create_iterator, std::function<void(Iterator*)> extra_check = nullptr) { // Seek auto* iterator = create_iterator(); ASSERT_OK(iterator->Refresh()); iterator->Seek(get_key(index)); check_iterator(iterator, expected_status, forward_value); if (extra_check) { extra_check(iterator); } delete iterator; // Next iterator = create_iterator(); ASSERT_OK(iterator->Refresh()); iterator->Seek(get_key(index - 1)); ASSERT_TRUE(iterator->Valid()); iterator->Next(); check_iterator(iterator, expected_status, forward_value); if (extra_check) { extra_check(iterator); } delete iterator; // SeekForPrev iterator = create_iterator(); ASSERT_OK(iterator->Refresh()); iterator->SeekForPrev(get_key(index)); check_iterator(iterator, expected_status, backward_value); if (extra_check) { extra_check(iterator); } delete iterator; // Prev iterator = create_iterator(); iterator->Seek(get_key(index + 1)); ASSERT_TRUE(iterator->Valid()); iterator->Prev(); check_iterator(iterator, expected_status, backward_value); if (extra_check) { extra_check(iterator); } delete iterator; }; for (auto tier : {Tier::kMemtable} /*kAllTiers*/) { // Avoid values from being purged. 
std::vector<const Snapshot*> snapshots; DestroyAndReopen(GetTestOptions()); // fill data for (int i = 0; i < static_cast<int>(data.size()); i++) { for (int j = static_cast<int>(data[i].size()) - 1; j >= 0; j--) { std::string key = get_key(i); std::string value = get_value(i, j); WriteBatch batch; switch (data[i][j]) { case kTypeValue: ASSERT_OK(Put(key, value)); break; case kTypeDeletion: ASSERT_OK(Delete(key)); break; case kTypeSingleDeletion: ASSERT_OK(SingleDelete(key)); break; case kTypeMerge: ASSERT_OK(Merge(key, value)); break; case kTypeBlobIndex: ASSERT_OK(PutBlobIndex(&batch, key, value)); ASSERT_OK(Write(&batch)); break; default: assert(false); }; } snapshots.push_back(dbfull()->GetSnapshot()); } ASSERT_OK( dbfull()->DeleteRange(WriteOptions(), cfh(), get_key(15), get_key(16))); snapshots.push_back(dbfull()->GetSnapshot()); MoveDataTo(tier); // Normal iterator verify(1, Status::kNotSupported, "", "", create_normal_iterator); verify(3, Status::kNotSupported, "", "", create_normal_iterator); verify(5, Status::kOk, get_value(5, 0), get_value(5, 0), create_normal_iterator); verify(7, Status::kOk, get_value(8, 0), get_value(6, 0), create_normal_iterator); verify(9, Status::kOk, get_value(10, 0), get_value(8, 0), create_normal_iterator); verify(11, Status::kNotSupported, "", "", create_normal_iterator); verify(13, Status::kOk, get_value(13, 2) + "," + get_value(13, 1) + "," + get_value(13, 0), get_value(13, 2) + "," + get_value(13, 1) + "," + get_value(13, 0), create_normal_iterator); verify(15, Status::kOk, get_value(16, 0), get_value(14, 0), create_normal_iterator); // Iterator with blob support verify(1, Status::kOk, get_value(1, 0), get_value(1, 0), create_blob_iterator, check_is_blob(true)); verify(3, Status::kOk, get_value(3, 0), get_value(3, 0), create_blob_iterator, check_is_blob(true)); verify(5, Status::kOk, get_value(5, 0), get_value(5, 0), create_blob_iterator, check_is_blob(false)); verify(7, Status::kOk, get_value(8, 0), get_value(6, 0), create_blob_iterator, check_is_blob(false)); verify(9, Status::kOk, get_value(10, 0), get_value(8, 0), create_blob_iterator, check_is_blob(false)); verify(11, Status::kNotSupported, "", "", create_blob_iterator); verify(13, Status::kOk, get_value(13, 2) + "," + get_value(13, 1) + "," + get_value(13, 0), get_value(13, 2) + "," + get_value(13, 1) + "," + get_value(13, 0), create_blob_iterator, check_is_blob(false)); verify(15, Status::kOk, get_value(16, 0), get_value(14, 0), create_blob_iterator, check_is_blob(false)); for (auto* snapshot : snapshots) { dbfull()->ReleaseSnapshot(snapshot); } } } } // namespace rocksdb int main(int argc, char** argv) { rocksdb::port::InstallStackTraceHandler(); ::testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); }
benqiu2016/nifi-minifi-cpp
thirdparty/rocksdb/db/db_blob_index_test.cc
C++
apache-2.0
13,817
//===--- Util.h - Common Driver Utilities -----------------------*- C++ -*-===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//

#ifndef SWIFT_DRIVER_UTIL_H
#define SWIFT_DRIVER_UTIL_H

#include "swift/Driver/Types.h"
#include "swift/Basic/LLVM.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {
namespace opt {
  class Arg;
} // end namespace opt
} // end namespace llvm

namespace swift {

namespace driver {
  /// An input argument from the command line and its inferred type.
  typedef std::pair<types::ID, const llvm::opt::Arg *> InputPair;

  /// Type used for a list of input arguments.
  typedef SmallVector<InputPair, 16> InputFileList;

  enum class LinkKind {
    None,
    Executable,
    DynamicLibrary
  };

  /// Used by a Job to request a "filelist": a file containing a list of all
  /// input or output files of a certain type.
  ///
  /// The Compilation is responsible for generating this file before running
  /// the Job this info is attached to.
  struct FilelistInfo {
    enum WhichFiles : bool {
      Input,
      Output
    };

    StringRef path;
    types::ID type;
    WhichFiles whichFiles;
  };

} // end namespace driver
} // end namespace swift

#endif
OscarSwanros/swift
include/swift/Driver/Util.h
C
apache-2.0
1,582
#include "editor/changeset_wrapper.hpp" #include "editor/osm_feature_matcher.hpp" #include "indexer/feature.hpp" #include "geometry/mercator.hpp" #include "base/logging.hpp" #include "base/macros.hpp" #include "std/algorithm.hpp" #include "std/random.hpp" #include "std/sstream.hpp" #include "private.h" using editor::XMLFeature; namespace { m2::RectD GetBoundingRect(vector<m2::PointD> const & geometry) { m2::RectD rect; for (auto const & p : geometry) { auto const latLon = MercatorBounds::ToLatLon(p); rect.Add({latLon.lon, latLon.lat}); } return rect; } bool OsmFeatureHasTags(pugi::xml_node const & osmFt) { return osmFt.child("tag"); } vector<string> const static kMainTags = {"amenity", "shop", "tourism", "historic", "craft", "emergency", "barrier", "highway", "office", "entrance", "building"}; string GetTypeForFeature(XMLFeature const & node) { for (string const & key : kMainTags) { if (node.HasTag(key)) { string const value = node.GetTagValue(key); if (value == "yes") return key; else if (key == "shop" || key == "office" || key == "building" || key == "entrance") return value + " " + key; // "convenience shop" else return value; } } // Did not find any known tags. return node.HasAnyTags() ? "unknown object" : "empty object"; } vector<m2::PointD> NaiveSample(vector<m2::PointD> const & source, size_t count) { count = min(count, source.size()); vector<m2::PointD> result; result.reserve(count); vector<size_t> indexes; indexes.reserve(count); minstd_rand engine; uniform_int_distribution<> distrib(0, source.size()); while (count--) { size_t index; do { index = distrib(engine); } while (find(begin(indexes), end(indexes), index) != end(indexes)); result.push_back(source[index]); indexes.push_back(index); } return result; } } // namespace namespace pugi { string DebugPrint(xml_document const & doc) { ostringstream stream; doc.print(stream, " "); return stream.str(); } } // namespace pugi namespace osm { ChangesetWrapper::ChangesetWrapper(TKeySecret const & keySecret, ServerApi06::TKeyValueTags const & comments) noexcept : m_changesetComments(comments), m_api(OsmOAuth::ServerAuth(keySecret)) { } ChangesetWrapper::~ChangesetWrapper() { if (m_changesetId) { try { m_changesetComments["comment"] = GetDescription(); m_api.UpdateChangeSet(m_changesetId, m_changesetComments); m_api.CloseChangeSet(m_changesetId); } catch (std::exception const & ex) { LOG(LWARNING, (ex.what())); } } } void ChangesetWrapper::LoadXmlFromOSM(ms::LatLon const & ll, pugi::xml_document & doc, double radiusInMeters) { auto const response = m_api.GetXmlFeaturesAtLatLon(ll.lat, ll.lon, radiusInMeters); if (response.first != OsmOAuth::HTTP::OK) MYTHROW(HttpErrorException, ("HTTP error", response, "with GetXmlFeaturesAtLatLon", ll)); if (pugi::status_ok != doc.load(response.second.c_str()).status) MYTHROW( OsmXmlParseException, ("Can't parse OSM server response for GetXmlFeaturesAtLatLon request", response.second)); } void ChangesetWrapper::LoadXmlFromOSM(ms::LatLon const & min, ms::LatLon const & max, pugi::xml_document & doc) { auto const response = m_api.GetXmlFeaturesInRect(min.lat, min.lon, max.lat, max.lon); if (response.first != OsmOAuth::HTTP::OK) MYTHROW(HttpErrorException, ("HTTP error", response, "with GetXmlFeaturesInRect", min, max)); if (pugi::status_ok != doc.load(response.second.c_str()).status) MYTHROW(OsmXmlParseException, ("Can't parse OSM server response for GetXmlFeaturesInRect request", response.second)); } XMLFeature ChangesetWrapper::GetMatchingNodeFeatureFromOSM(m2::PointD const & center) { // Match with OSM 
node. ms::LatLon const ll = MercatorBounds::ToLatLon(center); pugi::xml_document doc; // Throws! LoadXmlFromOSM(ll, doc); pugi::xml_node const bestNode = GetBestOsmNode(doc, ll); if (bestNode.empty()) { MYTHROW(OsmObjectWasDeletedException, ("OSM does not have any nodes at the coordinates", ll, ", server has returned:", doc)); } if (!OsmFeatureHasTags(bestNode)) { stringstream sstr; bestNode.print(sstr); LOG(LDEBUG, ("Node has no tags", sstr.str())); MYTHROW(EmptyFeatureException, ("Node has no tags")); } return XMLFeature(bestNode); } XMLFeature ChangesetWrapper::GetMatchingAreaFeatureFromOSM(vector<m2::PointD> const & geometry) { auto const kSamplePointsCount = 3; bool hasRelation = false; // Try several points in case of poor osm response. for (auto const & pt : NaiveSample(geometry, kSamplePointsCount)) { ms::LatLon const ll = MercatorBounds::ToLatLon(pt); pugi::xml_document doc; // Throws! LoadXmlFromOSM(ll, doc); if (doc.select_node("osm/relation")) { auto const rect = GetBoundingRect(geometry); LoadXmlFromOSM(ms::LatLon(rect.minY(), rect.minX()), ms::LatLon(rect.maxY(), rect.maxX()), doc); hasRelation = true; } pugi::xml_node const bestWayOrRelation = GetBestOsmWayOrRelation(doc, geometry); if (!bestWayOrRelation) { if (hasRelation) break; continue; } if (strcmp(bestWayOrRelation.name(), "relation") == 0) { stringstream sstr; bestWayOrRelation.print(sstr); LOG(LDEBUG, ("Relation is the best match", sstr.str())); MYTHROW(RelationFeatureAreNotSupportedException, ("Got relation as the best matching")); } if (!OsmFeatureHasTags(bestWayOrRelation)) { stringstream sstr; bestWayOrRelation.print(sstr); LOG(LDEBUG, ("Way or relation has no tags", sstr.str())); MYTHROW(EmptyFeatureException, ("Way or relation has no tags")); } // TODO: rename to wayOrRelation when relations are handled. XMLFeature const way(bestWayOrRelation); ASSERT(way.IsArea(), ("Best way must be an area.")); // AlexZ: TODO: Check that this way is really match our feature. // If we had some way to check it, why not to use it in selecting our feature? return way; } MYTHROW(OsmObjectWasDeletedException, ("OSM does not have any matching way for feature")); } void ChangesetWrapper::Create(XMLFeature node) { if (m_changesetId == kInvalidChangesetId) m_changesetId = m_api.CreateChangeSet(m_changesetComments); // Changeset id should be updated for every OSM server commit. node.SetAttribute("changeset", strings::to_string(m_changesetId)); // TODO(AlexZ): Think about storing/logging returned OSM ids. UNUSED_VALUE(m_api.CreateElement(node)); m_created_types[GetTypeForFeature(node)]++; } void ChangesetWrapper::Modify(XMLFeature node) { if (m_changesetId == kInvalidChangesetId) m_changesetId = m_api.CreateChangeSet(m_changesetComments); // Changeset id should be updated for every OSM server commit. node.SetAttribute("changeset", strings::to_string(m_changesetId)); m_api.ModifyElement(node); m_modified_types[GetTypeForFeature(node)]++; } void ChangesetWrapper::Delete(XMLFeature node) { if (m_changesetId == kInvalidChangesetId) m_changesetId = m_api.CreateChangeSet(m_changesetComments); // Changeset id should be updated for every OSM server commit. node.SetAttribute("changeset", strings::to_string(m_changesetId)); m_api.DeleteElement(node); m_deleted_types[GetTypeForFeature(node)]++; } string ChangesetWrapper::TypeCountToString(TTypeCount const & typeCount) { if (typeCount.empty()) return string(); // Convert map to vector and sort pairs by count, descending. 
vector<pair<string, size_t>> items; for (auto const & tc : typeCount) items.push_back(tc); sort(items.begin(), items.end(), [](pair<string, size_t> const & a, pair<string, size_t> const & b) { return a.second > b.second; }); ostringstream ss; auto const limit = min(size_t(3), items.size()); for (auto i = 0; i < limit; ++i) { if (i > 0) { // Separator: "A and B" for two, "A, B, and C" for three or more. if (limit > 2) ss << ", "; else ss << " "; if (i == limit - 1) ss << "and "; } auto & currentPair = items[i]; // If we have more objects left, make the last one a list of these. if (i == limit - 1 && limit < items.size()) { int count = 0; for (auto j = i; j < items.size(); ++j) count += items[j].second; currentPair = {"other object", count}; } // Format a count: "a shop" for single shop, "4 shops" for multiple. if (currentPair.second == 1) ss << "a "; else ss << currentPair.second << ' '; ss << currentPair.first; if (currentPair.second > 1) ss << 's'; } return ss.str(); } string ChangesetWrapper::GetDescription() const { string result; if (!m_created_types.empty()) result = "Created " + TypeCountToString(m_created_types); if (!m_modified_types.empty()) { if (!result.empty()) result += "; "; result += "Updated " + TypeCountToString(m_modified_types); } if (!m_deleted_types.empty()) { if (!result.empty()) result += "; "; result += "Deleted " + TypeCountToString(m_deleted_types); } return result; } } // namespace osm
stangls/omim
editor/changeset_wrapper.cpp
C++
apache-2.0
9,460
/* * see COPYRIGHT */ /* options */ extern int encode; /* encode the resulting file */ extern int pfbflag; /* produce compressed file */ extern int wantafm; /* want to see .afm instead of .t1a on stdout */ extern int correctvsize; /* try to correct the vertical size of characters */ extern int wantuid; /* user wants UniqueID entry in the font */ extern int allglyphs; /* convert all glyphs, not only 256 of them */ extern int warnlevel; /* the level of permitted warnings */ extern int forcemap; /* do mapping even on non-Unicode fonts */ /* options - maximal limits */ extern int max_stemdepth; /* maximal depth of stem stack in interpreter */ /* options - debugging */ extern int absolute; /* print out in absolute values */ extern int reverse; /* reverse font to Type1 path directions */ /* options - suboptions of Outline Processing */ extern int optimize; /* enables space optimization */ extern int smooth; /* enable smoothing of outlines */ extern int transform; /* enables transformation to 1000x1000 matrix */ extern int hints; /* enables autogeneration of hints */ extern int subhints; /* enables autogeneration of substituted hints */ extern int trybold; /* try to guess whether the font is bold */ extern int correctwidth; /* try to correct the character width */ extern int vectorize; /* vectorize the bitmaps */ extern int use_autotrace; /* use the autotrace library on bitmap */ /* options - suboptions of File Generation */ extern int gen_pfa; /* generate the font file */ extern int gen_afm; /* generate the metrics file */ extern int gen_dvienc; /* generate the dvips encoding file */ /* not quite options to select a particular source encoding */ extern int force_pid; /* specific platform id */ extern int force_eid; /* specific encoding id */ /* other globals */ extern FILE *null_file, *pfa_file, *afm_file, *ufm_file, *dvienc_file; extern int numglyphs; /* warnings */ #define WARNING_1 if(warnlevel >= 1) #define WARNING_2 if(warnlevel >= 2) #define WARNING_3 if(warnlevel >= 3) #define WARNING_4 if(warnlevel >= 4) /* * Bitmap control macros */ #define BITMAP_BYTES(size) (((size)+7)>>3) #define DEF_BITMAP(name, size) unsigned char name[BITMAP_BYTES(size)] #define SET_BITMAP(name, bit) ( name[(bit)>>3] |= (1<<((bit)&7)) ) #define CLR_BITMAP(name, bit) ( name[(bit)>>3] &= ~(1<<((bit)&7)) ) #define IS_BITMAP(name, bit) ( name[(bit)>>3] & (1<<((bit)&7)) ) /* debugging */ /* debug flags */ #define DEBUG_UNICODE 0x00000001 /* unicode to 8-bit code conversion */ #define DEBUG_MAINSTEMS 0x00000002 /* glyph-wide main stem generation */ #define DEBUG_SUBSTEMS 0x00000004 /* substituted stem generation */ #define DEBUG_STEMS (DEBUG_MAINSTEMS|DEBUG_SUBSTEMS) #define DEBUG_REVERSAL 0x00000008 /* reversal of the paths */ #define DEBUG_FIXCVDIR 0x00000010 /* fixcvdir() */ #define DEBUG_STEMOVERLAP 0x00000020 /* stemoverlap() */ #define DEBUG_BLUESTEMS 0x00000040 /* markbluestems() */ #define DEBUG_STRAIGHTEN 0x00000080 /* markbluestems() */ #define DEBUG_EXTMAP 0x00000100 /* parsing of external map */ #define DEBUG_TOINT 0x00000200 /* conversion of path to integer */ #define DEBUG_BUILDG 0x00000400 /* building of glyph path */ #define DEBUG_QUAD 0x00000800 /* splitting curves by quadrants */ #define DEBUG_SQEQ 0x00001000 /* square equation solver */ #define DEBUG_COMPOSITE 0x00002000 /* handling of composite glyphs */ #define DEBUG_FCONCISE 0x00004000 /* normalization of curves */ #define DEBUG_FT 0x00008000 /* FreeType front-end */ #define DEBUG_BITMAP 0x00010000 /* conversion from bitmap */ #define 
DEBUG_DISABLED 0x80000000 /* special flag: temporary disable debugging */ /* at what we want to look now */ #ifndef DEBUG # define DEBUG (0) #endif /* uncomment the next line if debugging data is wanted for one glyph only */ /* #define DBG_GLYPH "C118" /* */ #if DEBUG==0 # define ISDBG(name) (0) # define ENABLEDBG(condition) (0) # define DISABLEDBG(condition) (0) #else extern int debug; /* collection of the flags */ /* this ISDBG will only work on ANSI C, not K&R */ # define ISDBG(name) ( (debug & DEBUG_DISABLED) ? 0 : (debug & (DEBUG_##name)) ) # define ENABLEDBG(condition) ( (condition) ? (debug&=~DEBUG_DISABLED) : 0 ) # define DISABLEDBG(condition) ( (condition) ? (debug|=DEBUG_DISABLED) : 0 ) #endif #ifdef DBG_GLYPH # define DBG_TO_GLYPH(g) DISABLEDBG( strcmp( (g)->name, DBG_GLYPH ) ) # define DBG_FROM_GLYPH(g) ENABLEDBG(1) #else # define DBG_TO_GLYPH(g) (0) # define DBG_FROM_GLYPH(g) (0) #endif /* prototypes */ int iscale( int val); double fscale( double val); int unicode_rev_lookup( int unival); void bmp_outline( GLYPH *g, int scale, char *bmap, int xsz, int ysz, int xoff, int yoff); int isign( int x); int fsign( double x); char *dupcnstring( unsigned char *s, int len); /* global metrics for a font */ struct font_metrics { /* post */ double italic_angle; short underline_position; short underline_thickness; short is_fixed_pitch; /* hhea */ short ascender; short descender; /* head */ unsigned short units_per_em; short bbox[4]; /* name */ char *name_copyright; char *name_family; char *name_style; char *name_full; char *name_version; char *name_ps; /* other */ int force_bold; }; /* size of the encoding table - glyphs beyond 255 are actually unnumbered */ #define ENCTABSZ 1024 /* switch table structure for front-ends */ #define MAXSUFFIX 10 struct frontsw { char *name; /* name of the front end */ char *descr; /* description of the front end */ char *suffix[MAXSUFFIX]; /* possible file name suffixes */ void (*open)(char *fname, char *arg); /* open font file */ void (*close)(void); /* close font file */ int (*nglyphs)(void); /* get the number of glyphs */ int (*glnames)(GLYPH *glyphs); /* get the names of glyphs */ void (*glmetrics)(GLYPH *glyphs); /* get the metrics of glyphs */ int (*glenc)(GLYPH *glyphs, int *enc, int *unimap); /* get the encoding */ void (*fnmetrics)(struct font_metrics *fm); /* get the font metrics */ void (*glpath)(int glyphno, GLYPH *glyphs); /* get the glyph path */ void (*kerning)(GLYPH *glyph_list); /* extract the kerning data */ };
mrinsss/Full-Repo
tripezi/system/plugins/dompdf/lib/ttf2ufm/ttf2ufm-src/global.h
C
apache-2.0
6,437
import { test } from 'ember-qunit';
import moduleFor from 'open-event-frontend/tests/helpers/unit-helper';

moduleFor('route:admin/sessions', 'Unit | Route | admin/sessions', []);

test('it exists', function(assert) {
  let route = this.subject();
  assert.ok(route);
});
sumedh123/open-event-frontend
tests/unit/routes/admin/sessions-test.js
JavaScript
apache-2.0
272
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

FROM gcr.io/oss-fuzz-base/base-builder
MAINTAINER kcwu@csie.org
RUN apt-get update && apt-get install -y make autoconf automake libtool pkg-config
RUN git clone --depth 1 git://people.freedesktop.org/~dvdhrm/libtsm
WORKDIR libtsm
COPY build.sh libtsm_fuzzer.c $SRC/
robertswiecki/oss-fuzz
projects/libtsm/Dockerfile
Dockerfile
apache-2.0
927
/* * Copyright (c) 2009-2013 by Farsight Security, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef WDNS_H #define WDNS_H #ifdef __cplusplus extern "C" { #endif #include <stdbool.h> #include <stdint.h> #include <stdio.h> /* Constants. */ #define WDNS_LEN_HEADER 12 #define WDNS_MAXLEN_NAME 255 #define WDNS_MSG_SEC_QUESTION 0 #define WDNS_MSG_SEC_ANSWER 1 #define WDNS_MSG_SEC_AUTHORITY 2 #define WDNS_MSG_SEC_ADDITIONAL 3 #define WDNS_MSG_SEC_MAX 4 #define WDNS_PRESLEN_NAME 1025 #define WDNS_PRESLEN_TYPE_A 16 #define WDNS_PRESLEN_TYPE_AAAA 46 #define WDNS_OP_QUERY 0 #define WDNS_OP_IQUERY 1 #define WDNS_OP_STATUS 2 #define WDNS_OP_NOTIFY 4 #define WDNS_OP_UPDATE 5 #define WDNS_R_NOERROR 0 #define WDNS_R_FORMERR 1 #define WDNS_R_SERVFAIL 2 #define WDNS_R_NXDOMAIN 3 #define WDNS_R_NOTIMP 4 #define WDNS_R_REFUSED 5 #define WDNS_R_YXDOMAIN 6 #define WDNS_R_YXRRSET 7 #define WDNS_R_NXRRSET 8 #define WDNS_R_NOTAUTH 9 #define WDNS_R_NOTZONE 10 #define WDNS_R_BADVERS 16 #define WDNS_CLASS_IN 1 #define WDNS_CLASS_CH 3 #define WDNS_CLASS_HS 4 #define WDNS_CLASS_NONE 254 #define WDNS_CLASS_ANY 255 #define WDNS_TYPE_A 1 #define WDNS_TYPE_NS 2 #define WDNS_TYPE_MD 3 #define WDNS_TYPE_MF 4 #define WDNS_TYPE_CNAME 5 #define WDNS_TYPE_SOA 6 #define WDNS_TYPE_MB 7 #define WDNS_TYPE_MG 8 #define WDNS_TYPE_MR 9 #define WDNS_TYPE_NULL 10 #define WDNS_TYPE_WKS 11 #define WDNS_TYPE_PTR 12 #define WDNS_TYPE_HINFO 13 #define WDNS_TYPE_MINFO 14 #define WDNS_TYPE_MX 15 #define WDNS_TYPE_TXT 16 #define WDNS_TYPE_RP 17 #define WDNS_TYPE_AFSDB 18 #define WDNS_TYPE_X25 19 #define WDNS_TYPE_ISDN 20 #define WDNS_TYPE_RT 21 #define WDNS_TYPE_NSAP 22 #define WDNS_TYPE_NSAP_PTR 23 #define WDNS_TYPE_SIG 24 #define WDNS_TYPE_KEY 25 #define WDNS_TYPE_PX 26 #define WDNS_TYPE_GPOS 27 #define WDNS_TYPE_AAAA 28 #define WDNS_TYPE_LOC 29 #define WDNS_TYPE_NXT 30 #define WDNS_TYPE_EID 31 #define WDNS_TYPE_NIMLOC 32 #define WDNS_TYPE_SRV 33 #define WDNS_TYPE_ATMA 34 #define WDNS_TYPE_NAPTR 35 #define WDNS_TYPE_KX 36 #define WDNS_TYPE_CERT 37 #define WDNS_TYPE_A6 38 #define WDNS_TYPE_DNAME 39 #define WDNS_TYPE_SINK 40 #define WDNS_TYPE_OPT 41 #define WDNS_TYPE_APL 42 #define WDNS_TYPE_DS 43 #define WDNS_TYPE_SSHFP 44 #define WDNS_TYPE_IPSECKEY 45 #define WDNS_TYPE_RRSIG 46 #define WDNS_TYPE_NSEC 47 #define WDNS_TYPE_DNSKEY 48 #define WDNS_TYPE_DHCID 49 #define WDNS_TYPE_NSEC3 50 #define WDNS_TYPE_NSEC3PARAM 51 #define WDNS_TYPE_TLSA 52 /* Unassigned: 53 - 54 */ #define WDNS_TYPE_HIP 55 #define WDNS_TYPE_NINFO 56 #define WDNS_TYPE_RKEY 57 #define WDNS_TYPE_TALINK 58 #define WDNS_TYPE_CDS 59 #define WDNS_TYPE_CDNSKEY 60 #define WDNS_TYPE_OPENPGPKEY 61 #define WDNS_TYPE_CSYNC 62 /* Unassigned: 63 - 98 */ #define WDNS_TYPE_SPF 99 #define WDNS_TYPE_UINFO 100 #define WDNS_TYPE_UID 101 #define WDNS_TYPE_GID 102 #define WDNS_TYPE_UNSPEC 103 #define WDNS_TYPE_NID 104 #define WDNS_TYPE_L32 105 #define WDNS_TYPE_L64 106 #define WDNS_TYPE_LP 107 #define WDNS_TYPE_EUI48 108 #define WDNS_TYPE_EUI64 109 /* Unassigned: 110 - 248 */ #define WDNS_TYPE_TKEY 
249 #define WDNS_TYPE_TSIG 250 #define WDNS_TYPE_IXFR 251 #define WDNS_TYPE_AXFR 252 #define WDNS_TYPE_MAILB 253 #define WDNS_TYPE_MAILA 254 #define WDNS_TYPE_ANY 255 #define WDNS_TYPE_URI 256 #define WDNS_TYPE_CAA 257 /* Unassigned: 258 - 32767 */ #define WDNS_TYPE_TA 32768 #define WDNS_TYPE_DLV 32769 /* Unassigned: 32770 - 65279 */ /* Private use: 65280 - 65534 */ /* Reserved: 65535 */ /* Macros. */ #define WDNS_FLAGS_QR(msg) ((((msg).flags) >> 15) & 0x01) #define WDNS_FLAGS_OPCODE(msg) ((((msg).flags) >> 11) & 0x0f) #define WDNS_FLAGS_AA(msg) ((((msg).flags) >> 10) & 0x01) #define WDNS_FLAGS_TC(msg) ((((msg).flags) >> 9) & 0x01) #define WDNS_FLAGS_RD(msg) ((((msg).flags) >> 8) & 0x01) #define WDNS_FLAGS_RA(msg) ((((msg).flags) >> 7) & 0x01) #define WDNS_FLAGS_Z(msg) ((((msg).flags) >> 6) & 0x01) #define WDNS_FLAGS_AD(msg) ((((msg).flags) >> 5) & 0x01) #define WDNS_FLAGS_CD(msg) ((((msg).flags) >> 4) & 0x01) #define WDNS_FLAGS_RCODE(msg) ((msg).rcode) #if defined(__GNUC__) # define WDNS_WARN_UNUSED_RESULT __attribute__ ((warn_unused_result)) #else # define WDNS_WARN_UNUSED_RESULT #endif /* Data structures and definitions. */ typedef enum { wdns_res_success, wdns_res_failure, wdns_res_invalid_compression_pointer, wdns_res_invalid_length_octet, wdns_res_invalid_opcode, wdns_res_invalid_rcode, wdns_res_len, wdns_res_malloc, wdns_res_name_len, wdns_res_name_overflow, wdns_res_out_of_bounds, wdns_res_overflow, wdns_res_parse_error, wdns_res_qdcount, wdns_res_unknown_opcode, wdns_res_unknown_rcode, } wdns_res; typedef struct { uint8_t len; uint8_t *data; } wdns_name_t; typedef struct { uint16_t len; uint8_t data[]; } wdns_rdata_t; typedef struct { uint32_t rrttl; uint16_t rrtype; uint16_t rrclass; wdns_name_t name; wdns_rdata_t *rdata; } wdns_rr_t; typedef struct { uint32_t rrttl; uint16_t rrtype; uint16_t rrclass; uint16_t n_rdatas; wdns_name_t name; wdns_rdata_t **rdatas; } wdns_rrset_t; typedef struct { uint16_t n_rrs; uint16_t n_rrsets; wdns_rr_t *rrs; wdns_rrset_t *rrsets; } wdns_rrset_array_t; typedef struct { bool present; uint8_t version; uint16_t flags; uint16_t size; wdns_rdata_t *options; } wdns_edns_t; typedef struct { wdns_rrset_array_t sections[4]; wdns_edns_t edns; uint16_t id; uint16_t flags; uint16_t rcode; } wdns_message_t; /* Function prototypes. */ typedef void (*wdns_callback_name)(wdns_name_t *name, void *user); /* Functions for converting objects to presentation format strings. */ const char * wdns_res_to_str(wdns_res res); const char * wdns_opcode_to_str(uint16_t dns_opcode); const char * wdns_rcode_to_str(uint16_t dns_rcode); const char * wdns_rrclass_to_str(uint16_t dns_class); const char * wdns_rrtype_to_str(uint16_t dns_type); size_t wdns_domain_to_str(const uint8_t *src, size_t src_len, char *dst); char * wdns_message_to_str(wdns_message_t *m); char * wdns_rrset_array_to_str(wdns_rrset_array_t *a, unsigned sec); char * wdns_rrset_to_str(wdns_rrset_t *rrset, unsigned sec); char * wdns_rr_to_str(wdns_rr_t *rr, unsigned sec); char * wdns_rdata_to_str(const uint8_t *rdata, uint16_t rdlen, uint16_t rrtype, uint16_t rrclass); /* Functions for converting presentation format strings to objects. 
*/ WDNS_WARN_UNUSED_RESULT wdns_res wdns_str_to_name(const char *str, wdns_name_t *name); WDNS_WARN_UNUSED_RESULT wdns_res wdns_str_to_name_case(const char *str, wdns_name_t *name); wdns_res wdns_str_to_rcode(const char *str, uint16_t *out); uint16_t wdns_str_to_rrtype(const char *str); uint16_t wdns_str_to_rrclass(const char *str); wdns_res wdns_str_to_rdata(const char * str, uint16_t rrtype, uint16_t rrclass, uint8_t **rdata, size_t *rdlen); /* Comparison functions. */ bool wdns_compare_rr_rrset(const wdns_rr_t *rr, const wdns_rrset_t *rrset); /* Functions for clearing wdns objects. */ void wdns_clear_message(wdns_message_t *m); void wdns_clear_rr(wdns_rr_t *rr); void wdns_clear_rrset(wdns_rrset_t *rrset); void wdns_clear_rrset_array(wdns_rrset_array_t *a); /* Functions for printing formatted output. */ void wdns_print_message(FILE *fp, wdns_message_t *m); void wdns_print_rr(FILE *fp, wdns_rr_t *rr, unsigned sec); void wdns_print_rrset(FILE *fp, wdns_rrset_t *rrset, unsigned sec); void wdns_print_rrset_array(FILE *fp, wdns_rrset_array_t *a, unsigned sec); /* Utility functions. */ size_t wdns_skip_name(const uint8_t **data, const uint8_t *eod); wdns_res wdns_copy_uname(const uint8_t *p, const uint8_t *eop, const uint8_t *src, uint8_t *dst, size_t *sz); wdns_res wdns_len_uname(const uint8_t *p, const uint8_t *eop, size_t *sz); wdns_res wdns_sort_rrset(wdns_rrset_t *); wdns_res wdns_unpack_name(const uint8_t *p, const uint8_t *eop, const uint8_t *src, uint8_t *dst, size_t *sz); wdns_res wdns_count_labels(wdns_name_t *name, size_t *nlabels); wdns_res wdns_is_subdomain(wdns_name_t *n0, wdns_name_t *n1, bool *is_subdomain); wdns_res wdns_file_load_names(const char *fname, wdns_callback_name cb, void *user); wdns_res wdns_left_chop(wdns_name_t *name, wdns_name_t *chop); WDNS_WARN_UNUSED_RESULT wdns_res wdns_reverse_name(const uint8_t *name, size_t len_name, uint8_t *rev_name); /* Parsing functions. */ wdns_res wdns_parse_message(wdns_message_t *m, const uint8_t *pkt, size_t len); /* Deserialization functions. */ wdns_res wdns_deserialize_rrset(wdns_rrset_t *rrset, const uint8_t *buf, size_t sz); /* Serialization functions. */ wdns_res wdns_serialize_rrset(const wdns_rrset_t *rrset, uint8_t *buf, size_t *sz); /* Downcasing functions. */ void wdns_downcase_name(wdns_name_t *name); wdns_res wdns_downcase_rdata(wdns_rdata_t *rdata, uint16_t rrtype, uint16_t rrclass); wdns_res wdns_downcase_rrset(wdns_rrset_t *rrset); #ifdef __cplusplus } #endif #endif /* WDNS_H */
farsightsec/sie-dns-sensor
build-deps/debian-7-i386/dest/usr/include/wdns.h
C
apache-2.0
9,510
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.lib.generator.sdcrecord;

import com.google.common.collect.ImmutableSet;
import com.streamsets.pipeline.api.ext.ContextExtensions;
import com.streamsets.pipeline.lib.generator.DataGeneratorFactory;
import com.streamsets.pipeline.lib.generator.DataGenerator;

import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class SdcRecordDataGeneratorFactory extends DataGeneratorFactory {

  public static final Map<String, Object> CONFIGS = new HashMap<>();

  @SuppressWarnings("unchecked")
  public static final Set<Class<? extends Enum>> MODES = (Set) ImmutableSet.of();

  private final ContextExtensions context;

  public SdcRecordDataGeneratorFactory(Settings settings) {
    super(settings);
    this.context = (ContextExtensions) settings.getContext();
  }

  @Override
  public DataGenerator getGenerator(OutputStream os) throws IOException {
    return new SdcRecordDataGenerator(context.createRecordWriter(os), context);
  }
}
z123/datacollector
commonlib/src/main/java/com/streamsets/pipeline/lib/generator/sdcrecord/SdcRecordDataGeneratorFactory.java
Java
apache-2.0
1,644
<?php /** * This example gets all targeting criteria for a campaign. To add targeting * criteria, run AddCampaignTargetingCriteria.php. * * Tags: CampaignCriterionService.get * * Copyright 2014, Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @package GoogleApiAdsAdWords * @subpackage v201502 * @category WebServices * @copyright 2014, Google Inc. All Rights Reserved. * @license http://www.apache.org/licenses/LICENSE-2.0 Apache License, * Version 2.0 * @author Adam Rogal * @author Eric Koleda */ // Include the initialization file require_once dirname(dirname(__FILE__)) . '/init.php'; // Enter parameters required by the code example. $campaignId = 'INSERT_CAMPAIGN_ID_HERE'; /** * Runs the example. * @param AdWordsUser $user the user to run the example with * @param string $campaignId the ID of the campaign to get targeting criteria * for */ function GetCampaignTargetingCriteriaExample(AdWordsUser $user, $campaignId) { // Get the service, which loads the required classes. $campaignCriterionService = $user->GetService('CampaignCriterionService', ADWORDS_VERSION); // Create selector. $selector = new Selector(); $selector->fields = array('Id', 'CriteriaType'); // Create predicates. $selector->predicates[] = new Predicate('CampaignId', 'IN', array($campaignId)); $selector->predicates[] = new Predicate('CriteriaType', 'IN', array('LANGUAGE', 'LOCATION', 'AGE_RANGE', 'CARRIER', 'OPERATING_SYSTEM_VERSION', 'GENDER', 'POLYGON', 'PROXIMITY', 'PLATFORM')); // Create paging controls. $selector->paging = new Paging(0, AdWordsConstants::RECOMMENDED_PAGE_SIZE); do { // Make the get request. $page = $campaignCriterionService->get($selector); // Display results. if (isset($page->entries)) { foreach ($page->entries as $campaignCriterion) { printf("Campaign targeting criterion with ID '%s' and type '%s' was " . "found.\n", $campaignCriterion->criterion->id, $campaignCriterion->criterion->CriterionType); } } else { print "No campaign targeting criteria were found.\n"; } // Advance the paging index. $selector->paging->startIndex += AdWordsConstants::RECOMMENDED_PAGE_SIZE; } while ($page->totalNumEntries > $selector->paging->startIndex); } // Don't run the example if the file is being included. if (__FILE__ != realpath($_SERVER['PHP_SELF'])) { return; } try { // Get AdWordsUser from credentials in "../auth.ini" // relative to the AdWordsUser.php file's directory. $user = new AdWordsUser(); // Log every SOAP XML request and response. $user->LogAll(); // Run the example. GetCampaignTargetingCriteriaExample($user, $campaignId); } catch (Exception $e) { printf("An error has occurred: %s\n", $e->getMessage()); }
claimsmall/googleads-php-lib
examples/AdWords/v201502/Targeting/GetCampaignTargetingCriteria.php
PHP
apache-2.0
3,407
/*
 * Copyright (C) 2016 The Dagger Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package dagger.functional;

import static com.google.common.truth.Truth.assertThat;

import dagger.functional.ComponentWithReusableBindings.ChildOne;
import dagger.functional.ComponentWithReusableBindings.ChildTwo;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

@RunWith(JUnit4.class)
public class ReusableTest {
  @Test
  public void testReusable() {
    ComponentWithReusableBindings parent = DaggerComponentWithReusableBindings.create();
    ChildOne childOne = parent.childOne();
    ChildTwo childTwo = parent.childTwo();

    Object reusableInParent = parent.reusableInParent();
    assertThat(parent.reusableInParent()).isSameInstanceAs(reusableInParent);
    assertThat(childOne.reusableInParent()).isSameInstanceAs(reusableInParent);
    assertThat(childTwo.reusableInParent()).isSameInstanceAs(reusableInParent);

    Object reusableFromChildOne = childOne.reusableInChild();
    assertThat(childOne.reusableInChild()).isSameInstanceAs(reusableFromChildOne);

    Object reusableFromChildTwo = childTwo.reusableInChild();
    assertThat(childTwo.reusableInChild()).isSameInstanceAs(reusableFromChildTwo);

    assertThat(reusableFromChildTwo).isNotSameInstanceAs(reusableFromChildOne);
  }
}
cgruber/dagger
javatests/dagger/functional/ReusableTest.java
Java
apache-2.0
1,849
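For readers unfamiliar with Dagger's @Reusable scope exercised by the test above, a minimal sketch of the kind of bindings such a component would declare may help. The module, component, and method names below are hypothetical (the real ComponentWithReusableBindings is not shown in this row); only the @Module/@Provides/@Reusable/@Component annotations are standard Dagger API.

import dagger.Component;
import dagger.Module;
import dagger.Provides;
import dagger.Reusable;

// Hypothetical module: a @Reusable binding may be cached and reused, but unlike
// @Singleton it gives no strict single-instance guarantee across components.
@Module
class ReusableModule {
  @Provides
  @Reusable
  static Object provideReusable() {
    return new Object();
  }
}

// Hypothetical component exposing the binding, mirroring the
// parent.reusableInParent() accessor used in the test above.
@Component(modules = ReusableModule.class)
interface ParentComponent {
  Object reusableInParent();
}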
/*
 * Copyright 2012 Proofpoint, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.proofpoint.launcher;

class PidStatus {
  public final boolean held;
  public final int pid;

  private PidStatus(boolean held, int pid) {
    this.held = held;
    this.pid = pid;
  }

  public static PidStatus notHeld() {
    return new PidStatus(false, 0);
  }

  public static PidStatus heldBy(int pid) {
    return new PidStatus(true, pid);
  }
}
gwittel/platform
launcher/src/main/java/com/proofpoint/launcher/PidStatus.java
Java
apache-2.0
1,008
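Since PidStatus is a tiny value holder, a short usage sketch illustrates the two factory methods; the calling code is hypothetical (the class is package-private, so a real caller would live in the same launcher package) and is not part of the file above.

// Hypothetical caller in com.proofpoint.launcher.
PidStatus running = PidStatus.heldBy(1234);
PidStatus stopped = PidStatus.notHeld();

if (running.held) {
  System.out.println("launcher pid: " + running.pid);   // prints 1234
}
if (!stopped.held) {
  System.out.println("no pid lock held");               // pid defaults to 0
}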
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.zeppelin.interpreter.remote;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;

/**
 *
 */
public class RemoteInterpreterUtils {
  static Logger LOGGER = LoggerFactory.getLogger(RemoteInterpreterUtils.class);

  public static int findRandomAvailablePortOnAllLocalInterfaces() throws IOException {
    int port;
    try (ServerSocket socket = new ServerSocket(0);) {
      port = socket.getLocalPort();
      socket.close();
    }
    return port;
  }

  public static boolean checkIfRemoteEndpointAccessible(String host, int port) {
    try {
      Socket discover = new Socket();
      discover.setSoTimeout(1000);
      discover.connect(new InetSocketAddress(host, port), 1000);
      discover.close();
      return true;
    } catch (IOException e) {
      LOGGER.info("Exception in RemoteInterpreterUtils while checkIfRemoteEndpointAccessible", e);
      return false;
    }
  }
}
HeartSaVioR/incubator-zeppelin
zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtils.java
Java
apache-2.0
1,831
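A brief usage sketch combining the two static helpers shown above; the wrapping class and the expectation about the probe result are assumptions for illustration, not part of the Zeppelin code.

import java.io.IOException;

public class PortProbeExample {
  public static void main(String[] args) throws IOException {
    // Ask the OS for a free port by briefly binding a ServerSocket (see helper above).
    int port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();

    // Nothing is listening on that port yet, so the 1-second connect probe
    // is normally expected to fail and return false.
    boolean up = RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port);
    System.out.println("port " + port + " accessible: " + up);
  }
}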
/***
 Copyright (c) 2015 CommonsWare, LLC

 Licensed under the Apache License, Version 2.0 (the "License"); you may
 not use this file except in compliance with the License. You may obtain
 a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */

package com.commonsware.cwac.cam2.playground;

import android.app.Activity;
import android.app.Fragment;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Bundle;

public class MainActivity extends Activity
    implements PlaygroundFragment.Contract {
  private static final int REQUEST_CAMERA=1337;
  private static final String TAG_PLAYGROUND=PlaygroundFragment.class.getCanonicalName();
  private static final String TAG_RESULT=ResultFragment.class.getCanonicalName();
  private PlaygroundFragment playground=null;
  private ResultFragment result=null;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    playground=(PlaygroundFragment)getFragmentManager().findFragmentByTag(TAG_PLAYGROUND);
    result=(ResultFragment)getFragmentManager().findFragmentByTag(TAG_RESULT);

    if (playground==null) {
      playground=new PlaygroundFragment();
      getFragmentManager()
          .beginTransaction()
          .add(android.R.id.content, playground, TAG_PLAYGROUND)
          .commit();
    }

    if (result==null) {
      result=ResultFragment.newInstance();
      getFragmentManager()
          .beginTransaction()
          .add(android.R.id.content, result, TAG_RESULT)
          .hide(result)
          .commit();
    }
  }

  public void takePicture(Intent i) {
    startActivityForResult(i, REQUEST_CAMERA);
  }

  @Override
  public void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode==REQUEST_CAMERA)
      if (resultCode==Activity.RESULT_OK) {
        Bitmap bitmap=data.getParcelableExtra("data");

        if (bitmap==null) {
          result.setImage(data.getData());
        }
        else {
          result.setImage(bitmap);
        }

        getFragmentManager()
            .beginTransaction()
            .hide(playground)
            .show(result)
            .addToBackStack(null)
            .commit();
      }

    super.onActivityResult(requestCode, resultCode, data);
  }
}
msoftware/cwac-cam2
demo-playground/src/main/java/com/commonsware/cwac/cam2/playground/MainActivity.java
Java
apache-2.0
2,608
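The activity above implements PlaygroundFragment.Contract, a common CommonsWare pattern in which a fragment calls back into its host activity. A minimal sketch of the fragment side follows; PlaygroundFragment's real internals are not shown in this row, so the interface body and the getContract() helper are assumptions inferred from MainActivity's public takePicture(Intent) method.

import android.app.Fragment;
import android.content.Intent;

// Hypothetical fragment-side sketch of the contract pattern used above.
public class PlaygroundFragmentSketch extends Fragment {
  // Assumed shape of the contract; MainActivity's takePicture(Intent) suggests it.
  public interface Contract {
    void takePicture(Intent i);
  }

  private Contract getContract() {
    return (Contract) getActivity();   // the host activity implements Contract
  }

  void onTakePictureClicked(Intent cameraIntent) {
    getContract().takePicture(cameraIntent);   // activity starts the camera for a result
  }
}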
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" import contextlib import copy import datetime import errno import functools import hashlib import hmac import inspect import logging as std_logging import os import pyclbr import random import re import shutil import socket import struct import sys import tempfile import time from xml.sax import saxutils import eventlet import netaddr from oslo_concurrency import lockutils from oslo_concurrency import processutils from oslo_config import cfg from oslo_context import context as common_context from oslo_log import log as logging import oslo_messaging as messaging from oslo_utils import encodeutils from oslo_utils import excutils from oslo_utils import importutils from oslo_utils import strutils from oslo_utils import timeutils from oslo_utils import units import six from six.moves import range from nova import exception from nova.i18n import _, _LE, _LI, _LW notify_decorator = 'nova.notifications.notify_decorator' monkey_patch_opts = [ cfg.BoolOpt('monkey_patch', default=False, help='Whether to log monkey patching'), cfg.ListOpt('monkey_patch_modules', default=[ 'nova.api.ec2.cloud:%s' % (notify_decorator), 'nova.compute.api:%s' % (notify_decorator) ], help='List of modules/decorators to monkey patch'), ] utils_opts = [ cfg.IntOpt('password_length', default=12, help='Length of generated instance admin passwords'), cfg.StrOpt('instance_usage_audit_period', default='month', help='Time period to generate instance usages for. ' 'Time period must be hour, day, month or year'), cfg.BoolOpt('use_rootwrap_daemon', default=False, help="Start and use a daemon that can run the commands that " "need to be run with root privileges. This option is " "usually enabled on nodes that run nova compute " "processes"), cfg.StrOpt('rootwrap_config', default="/etc/nova/rootwrap.conf", help='Path to the rootwrap configuration file to use for ' 'running commands as root'), cfg.StrOpt('tempdir', help='Explicitly specify the temporary working directory'), ] workarounds_opts = [ cfg.BoolOpt('disable_rootwrap', default=False, help='This option allows a fallback to sudo for performance ' 'reasons. For example see ' 'https://bugs.launchpad.net/nova/+bug/1415106'), cfg.BoolOpt('disable_libvirt_livesnapshot', default=True, help='When using libvirt 1.2.2 live snapshots fail ' 'intermittently under load. This config option provides ' 'a mechanism to enable live snapshot while this is ' 'resolved. See ' 'https://bugs.launchpad.net/nova/+bug/1334398'), cfg.BoolOpt('destroy_after_evacuate', default=True, deprecated_for_removal=True, help='DEPRECATED: Whether to destroy ' 'instances on startup when we suspect ' 'they have previously been evacuated. This can result in ' 'data loss if undesired. 
See ' 'https://launchpad.net/bugs/1419785'), cfg.BoolOpt('handle_virt_lifecycle_events', default=True, help="Whether or not to handle events raised from the compute " "driver's 'emit_event' method. These are lifecycle " "events raised from compute drivers that implement the " "method. An example of a lifecycle event is an instance " "starting or stopping. If the instance is going through " "task state changes due to an API operation, like " "resize, the events are ignored. However, this is an " "advanced feature which allows the hypervisor to signal " "to the compute service that an unexpected state change " "has occurred in an instance and the instance can be " "shutdown automatically - which can inherently race in " "reboot operations or when the compute service or host " "is rebooted, either planned or due to an unexpected " "outage. Care should be taken when using this and " "sync_power_state_interval is negative since then if any " "instances are out of sync between the hypervisor and " "the Nova database they will have to be synchronized " "manually. See https://bugs.launchpad.net/bugs/1444630"), ] """ The workarounds_opts group is for very specific reasons. If you're: - Working around an issue in a system tool (e.g. libvirt or qemu) where the fix is in flight/discussed in that community. - The tool can be/is fixed in some distributions and rather than patch the code those distributions can trivially set a config option to get the "correct" behavior. Then this is a good place for your workaround. .. warning:: Please use with care! Document the BugID that your workaround is paired with. """ CONF = cfg.CONF CONF.register_opts(monkey_patch_opts) CONF.register_opts(utils_opts) CONF.import_opt('network_api_class', 'nova.network') CONF.register_opts(workarounds_opts, group='workarounds') LOG = logging.getLogger(__name__) # used in limits TIME_UNITS = { 'SECOND': 1, 'MINUTE': 60, 'HOUR': 3600, 'DAY': 86400 } _IS_NEUTRON = None synchronized = lockutils.synchronized_with_prefix('nova-') SM_IMAGE_PROP_PREFIX = "image_" SM_INHERITABLE_KEYS = ( 'min_ram', 'min_disk', 'disk_format', 'container_format', ) # Keys which hold large structured data that won't fit in the # size constraints of the system_metadata table, so we avoid # storing and/or loading them. SM_SKIP_KEYS = ( # Legacy names 'mappings', 'block_device_mapping', # Modern names 'img_mappings', 'img_block_device_mapping', ) # Image attributes which Cinder stores in volume image metadata # as regular properties VIM_IMAGE_ATTRIBUTES = ( 'image_id', 'image_name', 'size', 'checksum', 'container_format', 'disk_format', 'min_ram', 'min_disk', ) _FILE_CACHE = {} def vpn_ping(address, port, timeout=0.05, session_id=None): """Sends a vpn negotiation packet and returns the server session. Returns Boolean indicating whether the vpn_server is listening. Basic packet structure is below. Client packet (14 bytes):: 0 1 8 9 13 +-+--------+-----+ |x| cli_id |?????| +-+--------+-----+ x = packet identifier 0x38 cli_id = 64 bit identifier ? = unknown, probably flags/padding Server packet (26 bytes):: 0 1 8 9 13 14 21 2225 +-+--------+-----+--------+----+ |x| srv_id |?????| cli_id |????| +-+--------+-----+--------+----+ x = packet identifier 0x40 cli_id = 64 bit identifier ? = unknown, probably flags/padding bit 9 was 1 and the rest were 0 in testing """ # NOTE(tonyb) session_id isn't used for a real VPN connection so using a # cryptographically weak value is fine. 
if session_id is None: session_id = random.randint(0, 0xffffffffffffffff) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) data = struct.pack('!BQxxxxx', 0x38, session_id) sock.sendto(data, (address, port)) sock.settimeout(timeout) try: received = sock.recv(2048) except socket.timeout: return False finally: sock.close() fmt = '!BQxxxxxQxxxx' if len(received) != struct.calcsize(fmt): LOG.warning(_LW('Expected to receive %(exp)s bytes, ' 'but actually %(act)s'), dict(exp=struct.calcsize(fmt), act=len(received))) return False (identifier, server_sess, client_sess) = struct.unpack(fmt, received) return (identifier == 0x40 and client_sess == session_id) def get_root_helper(): if CONF.workarounds.disable_rootwrap: cmd = 'sudo' else: cmd = 'sudo nova-rootwrap %s' % CONF.rootwrap_config return cmd def _get_rootwrap_helper(): if CONF.use_rootwrap_daemon: return RootwrapDaemonHelper(CONF.rootwrap_config) else: return RootwrapProcessHelper() class RootwrapProcessHelper(object): def trycmd(self, *cmd, **kwargs): kwargs['root_helper'] = get_root_helper() return processutils.trycmd(*cmd, **kwargs) def execute(self, *cmd, **kwargs): kwargs['root_helper'] = get_root_helper() return processutils.execute(*cmd, **kwargs) class RootwrapDaemonHelper(RootwrapProcessHelper): _clients = {} @synchronized('daemon-client-lock') def _get_client(cls, rootwrap_config): try: return cls._clients[rootwrap_config] except KeyError: from oslo_rootwrap import client new_client = client.Client([ "sudo", "nova-rootwrap-daemon", rootwrap_config]) cls._clients[rootwrap_config] = new_client return new_client def __init__(self, rootwrap_config): self.client = self._get_client(rootwrap_config) def trycmd(self, *args, **kwargs): discard_warnings = kwargs.pop('discard_warnings', False) try: out, err = self.execute(*args, **kwargs) failed = False except processutils.ProcessExecutionError as exn: out, err = '', six.text_type(exn) failed = True if not failed and discard_warnings and err: # Handle commands that output to stderr but otherwise succeed err = '' return out, err def execute(self, *cmd, **kwargs): # NOTE(dims): This method is to provide compatibility with the # processutils.execute interface. So that calling daemon or direct # rootwrap to honor the same set of flags in kwargs and to ensure # that we don't regress any current behavior. 
cmd = [str(c) for c in cmd] loglevel = kwargs.pop('loglevel', std_logging.DEBUG) log_errors = kwargs.pop('log_errors', None) process_input = kwargs.pop('process_input', None) delay_on_retry = kwargs.pop('delay_on_retry', True) attempts = kwargs.pop('attempts', 1) check_exit_code = kwargs.pop('check_exit_code', [0]) ignore_exit_code = False if isinstance(check_exit_code, bool): ignore_exit_code = not check_exit_code check_exit_code = [0] elif isinstance(check_exit_code, int): check_exit_code = [check_exit_code] sanitized_cmd = strutils.mask_password(' '.join(cmd)) LOG.info(_LI('Executing RootwrapDaemonHelper.execute ' 'cmd=[%(cmd)r] kwargs=[%(kwargs)r]'), {'cmd': sanitized_cmd, 'kwargs': kwargs}) while attempts > 0: attempts -= 1 try: start_time = time.time() LOG.log(loglevel, _('Running cmd (subprocess): %s'), sanitized_cmd) (returncode, out, err) = self.client.execute( cmd, process_input) end_time = time.time() - start_time LOG.log(loglevel, 'CMD "%(sanitized_cmd)s" returned: %(return_code)s ' 'in %(end_time)0.3fs', {'sanitized_cmd': sanitized_cmd, 'return_code': returncode, 'end_time': end_time}) if not ignore_exit_code and returncode not in check_exit_code: out = strutils.mask_password(out) err = strutils.mask_password(err) raise processutils.ProcessExecutionError( exit_code=returncode, stdout=out, stderr=err, cmd=sanitized_cmd) return (out, err) except processutils.ProcessExecutionError as err: # if we want to always log the errors or if this is # the final attempt that failed and we want to log that. if log_errors == processutils.LOG_ALL_ERRORS or ( log_errors == processutils.LOG_FINAL_ERROR and not attempts): format = _('%(desc)r\ncommand: %(cmd)r\n' 'exit code: %(code)r\nstdout: %(stdout)r\n' 'stderr: %(stderr)r') LOG.log(loglevel, format, {"desc": err.description, "cmd": err.cmd, "code": err.exit_code, "stdout": err.stdout, "stderr": err.stderr}) if not attempts: LOG.log(loglevel, _('%r failed. Not Retrying.'), sanitized_cmd) raise else: LOG.log(loglevel, _('%r failed. Retrying.'), sanitized_cmd) if delay_on_retry: time.sleep(random.randint(20, 200) / 100.0) def execute(*cmd, **kwargs): """Convenience wrapper around oslo's execute() method.""" if 'run_as_root' in kwargs and kwargs.get('run_as_root'): if CONF.use_rootwrap_daemon: return RootwrapDaemonHelper(CONF.rootwrap_config).execute( *cmd, **kwargs) else: return RootwrapProcessHelper().execute(*cmd, **kwargs) return processutils.execute(*cmd, **kwargs) def ssh_execute(dest, *cmd, **kwargs): """Convenience wrapper to execute ssh command.""" ssh_cmd = ['ssh', '-o', 'BatchMode=yes'] ssh_cmd.append(dest) ssh_cmd.extend(cmd) return execute(*ssh_cmd, **kwargs) def trycmd(*args, **kwargs): """Convenience wrapper around oslo's trycmd() method.""" if kwargs.get('run_as_root', False): if CONF.use_rootwrap_daemon: return RootwrapDaemonHelper(CONF.rootwrap_config).trycmd( *args, **kwargs) else: return RootwrapProcessHelper().trycmd(*args, **kwargs) return processutils.trycmd(*args, **kwargs) def novadir(): import nova return os.path.abspath(nova.__file__).split('nova/__init__.py')[0] def generate_uid(topic, size=8): characters = '01234567890abcdefghijklmnopqrstuvwxyz' choices = [random.choice(characters) for _x in range(size)] return '%s-%s' % (topic, ''.join(choices)) # Default symbols to use for passwords. Avoids visually confusing characters. 
# ~6 bits per symbol DEFAULT_PASSWORD_SYMBOLS = ('23456789', # Removed: 0,1 'ABCDEFGHJKLMNPQRSTUVWXYZ', # Removed: I, O 'abcdefghijkmnopqrstuvwxyz') # Removed: l # ~5 bits per symbol EASIER_PASSWORD_SYMBOLS = ('23456789', # Removed: 0, 1 'ABCDEFGHJKLMNPQRSTUVWXYZ') # Removed: I, O def last_completed_audit_period(unit=None, before=None): """This method gives you the most recently *completed* audit period. arguments: units: string, one of 'hour', 'day', 'month', 'year' Periods normally begin at the beginning (UTC) of the period unit (So a 'day' period begins at midnight UTC, a 'month' unit on the 1st, a 'year' on Jan, 1) unit string may be appended with an optional offset like so: 'day@18' This will begin the period at 18:00 UTC. 'month@15' starts a monthly period on the 15th, and year@3 begins a yearly one on March 1st. before: Give the audit period most recently completed before <timestamp>. Defaults to now. returns: 2 tuple of datetimes (begin, end) The begin timestamp of this audit period is the same as the end of the previous. """ if not unit: unit = CONF.instance_usage_audit_period offset = 0 if '@' in unit: unit, offset = unit.split("@", 1) offset = int(offset) if before is not None: rightnow = before else: rightnow = timeutils.utcnow() if unit not in ('month', 'day', 'year', 'hour'): raise ValueError('Time period must be hour, day, month or year') if unit == 'month': if offset == 0: offset = 1 end = datetime.datetime(day=offset, month=rightnow.month, year=rightnow.year) if end >= rightnow: year = rightnow.year if 1 >= rightnow.month: year -= 1 month = 12 + (rightnow.month - 1) else: month = rightnow.month - 1 end = datetime.datetime(day=offset, month=month, year=year) year = end.year if 1 >= end.month: year -= 1 month = 12 + (end.month - 1) else: month = end.month - 1 begin = datetime.datetime(day=offset, month=month, year=year) elif unit == 'year': if offset == 0: offset = 1 end = datetime.datetime(day=1, month=offset, year=rightnow.year) if end >= rightnow: end = datetime.datetime(day=1, month=offset, year=rightnow.year - 1) begin = datetime.datetime(day=1, month=offset, year=rightnow.year - 2) else: begin = datetime.datetime(day=1, month=offset, year=rightnow.year - 1) elif unit == 'day': end = datetime.datetime(hour=offset, day=rightnow.day, month=rightnow.month, year=rightnow.year) if end >= rightnow: end = end - datetime.timedelta(days=1) begin = end - datetime.timedelta(days=1) elif unit == 'hour': end = rightnow.replace(minute=offset, second=0, microsecond=0) if end >= rightnow: end = end - datetime.timedelta(hours=1) begin = end - datetime.timedelta(hours=1) return (begin, end) def generate_password(length=None, symbolgroups=DEFAULT_PASSWORD_SYMBOLS): """Generate a random password from the supplied symbol groups. At least one symbol from each group will be included. Unpredictable results if length is less than the number of symbol groups. Believed to be reasonably secure (with a reasonable password length!) """ if length is None: length = CONF.password_length r = random.SystemRandom() # NOTE(jerdfelt): Some password policies require at least one character # from each group of symbols, so start off with one random character # from each symbol group password = [r.choice(s) for s in symbolgroups] # If length < len(symbolgroups), the leading characters will only # be from the first length groups. Try our best to not be predictable # by shuffling and then truncating. 
r.shuffle(password) password = password[:length] length -= len(password) # then fill with random characters from all symbol groups symbols = ''.join(symbolgroups) password.extend([r.choice(symbols) for _i in range(length)]) # finally shuffle to ensure first x characters aren't from a # predictable group r.shuffle(password) return ''.join(password) def get_my_linklocal(interface): try: if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface) condition = '\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link' links = [re.search(condition, x) for x in if_str[0].split('\n')] address = [w.group(1) for w in links if w is not None] if address[0] is not None: return address[0] else: msg = _('Link Local address is not found.:%s') % if_str raise exception.NovaException(msg) except Exception as ex: msg = _("Couldn't get Link Local IP of %(interface)s" " :%(ex)s") % {'interface': interface, 'ex': ex} raise exception.NovaException(msg) def xhtml_escape(value): """Escapes a string so it is valid within XML or XHTML. """ return saxutils.escape(value, {'"': '&quot;', "'": '&apos;'}) def utf8(value): """Try to turn a string into utf-8 if possible. Code is directly from the utf8 function in http://github.com/facebook/tornado/blob/master/tornado/escape.py """ if isinstance(value, six.text_type): return value.encode('utf-8') assert isinstance(value, str) return value def check_isinstance(obj, cls): """Checks that obj is of type cls, and lets PyLint infer types.""" if isinstance(obj, cls): return obj raise Exception(_('Expected object of type: %s') % (str(cls))) def parse_server_string(server_str): """Parses the given server_string and returns a tuple of host and port. If it's not a combination of host part and port, the port element is an empty string. If the input is invalid expression, return a tuple of two empty strings. """ try: # First of all, exclude pure IPv6 address (w/o port). if netaddr.valid_ipv6(server_str): return (server_str, '') # Next, check if this is IPv6 address with a port number combination. if server_str.find("]:") != -1: (address, port) = server_str.replace('[', '', 1).split(']:') return (address, port) # Third, check if this is a combination of an address and a port if server_str.find(':') == -1: return (server_str, '') # This must be a combination of an address and a port (address, port) = server_str.split(':') return (address, port) except (ValueError, netaddr.AddrFormatError): LOG.error(_LE('Invalid server_string: %s'), server_str) return ('', '') def is_valid_ipv6_cidr(address): try: netaddr.IPNetwork(address, version=6).cidr return True except (TypeError, netaddr.AddrFormatError): return False def get_shortened_ipv6(address): addr = netaddr.IPAddress(address, version=6) return str(addr.ipv6()) def get_shortened_ipv6_cidr(address): net = netaddr.IPNetwork(address, version=6) return str(net.cidr) def is_valid_cidr(address): """Check if address is valid The provided address can be a IPv6 or a IPv4 CIDR address. """ try: # Validate the correct CIDR Address netaddr.IPNetwork(address) except netaddr.AddrFormatError: return False # Prior validation partially verify /xx part # Verify it here ip_segment = address.split('/') if (len(ip_segment) <= 1 or ip_segment[1] == ''): return False return True def get_ip_version(network): """Returns the IP version of a network (IPv4 or IPv6). Raises AddrFormatError if invalid network. 
""" if netaddr.IPNetwork(network).version == 6: return "IPv6" elif netaddr.IPNetwork(network).version == 4: return "IPv4" def safe_ip_format(ip): """Transform ip string to "safe" format. Will return ipv4 addresses unchanged, but will nest ipv6 addresses inside square brackets. """ try: if netaddr.IPAddress(ip).version == 6: return '[%s]' % ip except (TypeError, netaddr.AddrFormatError): # hostname pass # it's IPv4 or hostname return ip def monkey_patch(): """If the CONF.monkey_patch set as True, this function patches a decorator for all functions in specified modules. You can set decorators for each modules using CONF.monkey_patch_modules. The format is "Module path:Decorator function". Example: 'nova.api.ec2.cloud:nova.notifications.notify_decorator' Parameters of the decorator is as follows. (See nova.notifications.notify_decorator) name - name of the function function - object of the function """ # If CONF.monkey_patch is not True, this function do nothing. if not CONF.monkey_patch: return if six.PY3: def is_method(obj): # Unbound methods became regular functions on Python 3 return inspect.ismethod(obj) or inspect.isfunction(obj) else: is_method = inspect.ismethod # Get list of modules and decorators for module_and_decorator in CONF.monkey_patch_modules: module, decorator_name = module_and_decorator.split(':') # import decorator function decorator = importutils.import_class(decorator_name) __import__(module) # Retrieve module information using pyclbr module_data = pyclbr.readmodule_ex(module) for key, value in module_data.items(): # set the decorator for the class methods if isinstance(value, pyclbr.Class): clz = importutils.import_class("%s.%s" % (module, key)) for method, func in inspect.getmembers(clz, is_method): setattr(clz, method, decorator("%s.%s.%s" % (module, key, method), func)) # set the decorator for the function if isinstance(value, pyclbr.Function): func = importutils.import_class("%s.%s" % (module, key)) setattr(sys.modules[module], key, decorator("%s.%s" % (module, key), func)) def convert_to_list_dict(lst, label): """Convert a value or list into a list of dicts.""" if not lst: return None if not isinstance(lst, list): lst = [lst] return [{label: x} for x in lst] def make_dev_path(dev, partition=None, base='/dev'): """Return a path to a particular device. >>> make_dev_path('xvdc') /dev/xvdc >>> make_dev_path('xvdc', 1) /dev/xvdc1 """ path = os.path.join(base, dev) if partition: path += str(partition) return path def sanitize_hostname(hostname, default_name=None): """Return a hostname which conforms to RFC-952 and RFC-1123 specs except the length of hostname. Window, Linux, and Dnsmasq has different limitation: Windows: 255 (net_bios limits to 15, but window will truncate it) Linux: 64 Dnsmasq: 63 Due to nova-network will leverage dnsmasq to set hostname, so we chose 63. 
""" def truncate_hostname(name): if len(name) > 63: LOG.warning(_LW("Hostname %(hostname)s is longer than 63, " "truncate it to %(truncated_name)s"), {'hostname': name, 'truncated_name': name[:63]}) return name[:63] if isinstance(hostname, six.text_type): # Remove characters outside the Unicode range U+0000-U+00FF hostname = hostname.encode('latin-1', 'ignore') if six.PY3: hostname = hostname.decode('latin-1') hostname = re.sub('[ _]', '-', hostname) hostname = re.sub('[^\w.-]+', '', hostname) hostname = hostname.lower() hostname = hostname.strip('.-') # NOTE(eliqiao): set hostname to default_display_name to avoid # empty hostname if hostname == "" and default_name is not None: return truncate_hostname(default_name) return truncate_hostname(hostname) @contextlib.contextmanager def temporary_mutation(obj, **kwargs): """Temporarily set the attr on a particular object to a given value then revert when finished. One use of this is to temporarily set the read_deleted flag on a context object: with temporary_mutation(context, read_deleted="yes"): do_something_that_needed_deleted_objects() """ def is_dict_like(thing): return hasattr(thing, 'has_key') def get(thing, attr, default): if is_dict_like(thing): return thing.get(attr, default) else: return getattr(thing, attr, default) def set_value(thing, attr, val): if is_dict_like(thing): thing[attr] = val else: setattr(thing, attr, val) def delete(thing, attr): if is_dict_like(thing): del thing[attr] else: delattr(thing, attr) NOT_PRESENT = object() old_values = {} for attr, new_value in kwargs.items(): old_values[attr] = get(obj, attr, NOT_PRESENT) set_value(obj, attr, new_value) try: yield finally: for attr, old_value in old_values.items(): if old_value is NOT_PRESENT: delete(obj, attr) else: set_value(obj, attr, old_value) def generate_mac_address(): """Generate an Ethernet MAC address.""" # NOTE(vish): We would prefer to use 0xfe here to ensure that linux # bridge mac addresses don't change, but it appears to # conflict with libvirt, so we use the next highest octet # that has the unicast and locally administered bits set # properly: 0xfa. # Discussion: https://bugs.launchpad.net/nova/+bug/921838 mac = [0xfa, 0x16, 0x3e, random.randint(0x00, 0xff), random.randint(0x00, 0xff), random.randint(0x00, 0xff)] return ':'.join(map(lambda x: "%02x" % x, mac)) def read_file_as_root(file_path): """Secure helper to read file as root.""" try: out, _err = execute('cat', file_path, run_as_root=True) return out except processutils.ProcessExecutionError: raise exception.FileNotFound(file_path=file_path) @contextlib.contextmanager def temporary_chown(path, owner_uid=None): """Temporarily chown a path. 
:param owner_uid: UID of temporary owner (defaults to current user) """ if owner_uid is None: owner_uid = os.getuid() orig_uid = os.stat(path).st_uid if orig_uid != owner_uid: execute('chown', owner_uid, path, run_as_root=True) try: yield finally: if orig_uid != owner_uid: execute('chown', orig_uid, path, run_as_root=True) @contextlib.contextmanager def tempdir(**kwargs): argdict = kwargs.copy() if 'dir' not in argdict: argdict['dir'] = CONF.tempdir tmpdir = tempfile.mkdtemp(**argdict) try: yield tmpdir finally: try: shutil.rmtree(tmpdir) except OSError as e: LOG.error(_LE('Could not remove tmpdir: %s'), e) def walk_class_hierarchy(clazz, encountered=None): """Walk class hierarchy, yielding most derived classes first.""" if not encountered: encountered = [] for subclass in clazz.__subclasses__(): if subclass not in encountered: encountered.append(subclass) # drill down to leaves first for subsubclass in walk_class_hierarchy(subclass, encountered): yield subsubclass yield subclass class UndoManager(object): """Provides a mechanism to facilitate rolling back a series of actions when an exception is raised. """ def __init__(self): self.undo_stack = [] def undo_with(self, undo_func): self.undo_stack.append(undo_func) def _rollback(self): for undo_func in reversed(self.undo_stack): undo_func() def rollback_and_reraise(self, msg=None, **kwargs): """Rollback a series of actions then re-raise the exception. .. note:: (sirp) This should only be called within an exception handler. """ with excutils.save_and_reraise_exception(): if msg: LOG.exception(msg, **kwargs) self._rollback() def mkfs(fs, path, label=None, run_as_root=False): """Format a file or block device :param fs: Filesystem type (examples include 'swap', 'ext3', 'ext4' 'btrfs', etc.) :param path: Path to file or block device to format :param label: Volume label to use """ if fs == 'swap': args = ['mkswap'] else: args = ['mkfs', '-t', fs] # add -F to force no interactive execute on non-block device. if fs in ('ext3', 'ext4', 'ntfs'): args.extend(['-F']) if label: if fs in ('msdos', 'vfat'): label_opt = '-n' else: label_opt = '-L' args.extend([label_opt, label]) args.append(path) execute(*args, run_as_root=run_as_root) def last_bytes(file_like_object, num): """Return num bytes from the end of the file, and remaining byte count. :param file_like_object: The file to read :param num: The number of bytes to return :returns (data, remaining) """ try: file_like_object.seek(-num, os.SEEK_END) except IOError as e: # seek() fails with EINVAL when trying to go before the start of the # file. It means that num is larger than the file size, so just # go to the start. 
if e.errno == errno.EINVAL: file_like_object.seek(0, os.SEEK_SET) else: raise remaining = file_like_object.tell() return (file_like_object.read(), remaining) def metadata_to_dict(metadata, filter_deleted=False): result = {} for item in metadata: if not filter_deleted and item.get('deleted'): continue result[item['key']] = item['value'] return result def dict_to_metadata(metadata): result = [] for key, value in six.iteritems(metadata): result.append(dict(key=key, value=value)) return result def instance_meta(instance): if isinstance(instance['metadata'], dict): return instance['metadata'] else: return metadata_to_dict(instance['metadata']) def instance_sys_meta(instance): if not instance.get('system_metadata'): return {} if isinstance(instance['system_metadata'], dict): return instance['system_metadata'] else: return metadata_to_dict(instance['system_metadata'], filter_deleted=True) def get_wrapped_function(function): """Get the method at the bottom of a stack of decorators.""" if not hasattr(function, '__closure__') or not function.__closure__: return function def _get_wrapped_function(function): if not hasattr(function, '__closure__') or not function.__closure__: return None for closure in function.__closure__: func = closure.cell_contents deeper_func = _get_wrapped_function(func) if deeper_func: return deeper_func elif hasattr(closure.cell_contents, '__call__'): return closure.cell_contents return _get_wrapped_function(function) def expects_func_args(*args): def _decorator_checker(dec): @functools.wraps(dec) def _decorator(f): base_f = get_wrapped_function(f) arg_names, a, kw, _default = inspect.getargspec(base_f) if a or kw or set(args) <= set(arg_names): # NOTE (ndipanov): We can't really tell if correct stuff will # be passed if it's a function with *args or **kwargs so # we still carry on and hope for the best return dec(f) else: raise TypeError("Decorated function %(f_name)s does not " "have the arguments expected by the " "decorator %(d_name)s" % {'f_name': base_f.__name__, 'd_name': dec.__name__}) return _decorator return _decorator_checker class ExceptionHelper(object): """Class to wrap another and translate the ClientExceptions raised by its function calls to the actual ones. 
""" def __init__(self, target): self._target = target def __getattr__(self, name): func = getattr(self._target, name) @functools.wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except messaging.ExpectedException as e: raise (e.exc_info[1], None, e.exc_info[2]) return wrapper def check_string_length(value, name=None, min_length=0, max_length=None): """Check the length of specified string :param value: the value of the string :param name: the name of the string :param min_length: the min_length of the string :param max_length: the max_length of the string """ if not isinstance(value, six.string_types): if name is None: msg = _("The input is not a string or unicode") else: msg = _("%s is not a string or unicode") % name raise exception.InvalidInput(message=msg) if name is None: name = value if len(value) < min_length: msg = _("%(name)s has a minimum character requirement of " "%(min_length)s.") % {'name': name, 'min_length': min_length} raise exception.InvalidInput(message=msg) if max_length and len(value) > max_length: msg = _("%(name)s has more than %(max_length)s " "characters.") % {'name': name, 'max_length': max_length} raise exception.InvalidInput(message=msg) def validate_integer(value, name, min_value=None, max_value=None): """Make sure that value is a valid integer, potentially within range.""" try: value = int(str(value)) except (ValueError, UnicodeEncodeError): msg = _('%(value_name)s must be an integer') raise exception.InvalidInput(reason=( msg % {'value_name': name})) if min_value is not None: if value < min_value: msg = _('%(value_name)s must be >= %(min_value)d') raise exception.InvalidInput( reason=(msg % {'value_name': name, 'min_value': min_value})) if max_value is not None: if value > max_value: msg = _('%(value_name)s must be <= %(max_value)d') raise exception.InvalidInput( reason=( msg % {'value_name': name, 'max_value': max_value}) ) return value def spawn(func, *args, **kwargs): """Passthrough method for eventlet.spawn. This utility exists so that it can be stubbed for testing without interfering with the service spawns. It will also grab the context from the threadlocal store and add it to the store on the new thread. This allows for continuity in logging the context when using this method to spawn a new thread. """ _context = common_context.get_current() @functools.wraps(func) def context_wrapper(*args, **kwargs): # NOTE: If update_store is not called after spawn it won't be # available for the logger to pull from threadlocal storage. if _context is not None: _context.update_store() return func(*args, **kwargs) return eventlet.spawn(context_wrapper, *args, **kwargs) def spawn_n(func, *args, **kwargs): """Passthrough method for eventlet.spawn_n. This utility exists so that it can be stubbed for testing without interfering with the service spawns. It will also grab the context from the threadlocal store and add it to the store on the new thread. This allows for continuity in logging the context when using this method to spawn a new thread. """ _context = common_context.get_current() @functools.wraps(func) def context_wrapper(*args, **kwargs): # NOTE: If update_store is not called after spawn_n it won't be # available for the logger to pull from threadlocal storage. if _context is not None: _context.update_store() func(*args, **kwargs) eventlet.spawn_n(context_wrapper, *args, **kwargs) def is_none_string(val): """Check if a string represents a None value. 
""" if not isinstance(val, six.string_types): return False return val.lower() == 'none' def convert_version_to_int(version): try: if isinstance(version, six.string_types): version = convert_version_to_tuple(version) if isinstance(version, tuple): return six.moves.reduce(lambda x, y: (x * 1000) + y, version) except Exception: msg = _("Hypervisor version %s is invalid.") % version raise exception.NovaException(msg) def convert_version_to_str(version_int): version_numbers = [] factor = 1000 while version_int != 0: version_number = version_int - (version_int // factor * factor) version_numbers.insert(0, str(version_number)) version_int = version_int // factor return six.moves.reduce(lambda x, y: "%s.%s" % (x, y), version_numbers) def convert_version_to_tuple(version_str): return tuple(int(part) for part in version_str.split('.')) def is_neutron(): global _IS_NEUTRON if _IS_NEUTRON is not None: return _IS_NEUTRON try: # compatibility with Folsom/Grizzly configs cls_name = CONF.network_api_class if cls_name == 'nova.network.quantumv2.api.API': cls_name = 'nova.network.neutronv2.api.API' from nova.network.neutronv2 import api as neutron_api _IS_NEUTRON = issubclass(importutils.import_class(cls_name), neutron_api.API) except ImportError: _IS_NEUTRON = False return _IS_NEUTRON def is_auto_disk_config_disabled(auto_disk_config_raw): auto_disk_config_disabled = False if auto_disk_config_raw is not None: adc_lowered = auto_disk_config_raw.strip().lower() if adc_lowered == "disabled": auto_disk_config_disabled = True return auto_disk_config_disabled def get_auto_disk_config_from_instance(instance=None, sys_meta=None): if sys_meta is None: sys_meta = instance_sys_meta(instance) return sys_meta.get("image_auto_disk_config") def get_auto_disk_config_from_image_props(image_properties): return image_properties.get("auto_disk_config") def get_system_metadata_from_image(image_meta, flavor=None): system_meta = {} prefix_format = SM_IMAGE_PROP_PREFIX + '%s' for key, value in six.iteritems(image_meta.get('properties', {})): if key in SM_SKIP_KEYS: continue new_value = safe_truncate(six.text_type(value), 255) system_meta[prefix_format % key] = new_value for key in SM_INHERITABLE_KEYS: value = image_meta.get(key) if key == 'min_disk' and flavor: if image_meta.get('disk_format') == 'vhd': value = flavor['root_gb'] else: value = max(value, flavor['root_gb']) if value is None: continue system_meta[prefix_format % key] = value return system_meta def get_image_from_system_metadata(system_meta): image_meta = {} properties = {} if not isinstance(system_meta, dict): system_meta = metadata_to_dict(system_meta, filter_deleted=True) for key, value in six.iteritems(system_meta): if value is None: continue # NOTE(xqueralt): Not sure this has to inherit all the properties or # just the ones we need. Leaving it for now to keep the old behaviour. if key.startswith(SM_IMAGE_PROP_PREFIX): key = key[len(SM_IMAGE_PROP_PREFIX):] if key in SM_SKIP_KEYS: continue if key in SM_INHERITABLE_KEYS: image_meta[key] = value else: properties[key] = value image_meta['properties'] = properties return image_meta def get_image_metadata_from_volume(volume): properties = copy.copy(volume.get('volume_image_metadata', {})) image_meta = {'properties': properties} # Volume size is no longer related to the original image size, # so we take it from the volume directly. Cinder creates # volumes in Gb increments, and stores size in Gb, whereas # glance reports size in bytes. As we're returning glance # metadata here, we need to convert it. 
image_meta['size'] = volume.get('size', 0) * units.Gi # NOTE(yjiang5): restore the basic attributes # NOTE(mdbooth): These values come from volume_glance_metadata # in cinder. This is a simple key/value table, and all values # are strings. We need to convert them to ints to avoid # unexpected type errors. for attr in VIM_IMAGE_ATTRIBUTES: val = properties.pop(attr, None) if attr in ('min_ram', 'min_disk'): image_meta[attr] = int(val or 0) # NOTE(yjiang5): Always set the image status as 'active' # and depends on followed volume_api.check_attach() to # verify it. This hack should be harmless with that check. image_meta['status'] = 'active' return image_meta def get_hash_str(base_str): """Returns string that represents MD5 hash of base_str (in hex format). If base_str is a Unicode string, encode it to UTF-8. """ if isinstance(base_str, six.text_type): base_str = base_str.encode('utf-8') return hashlib.md5(base_str).hexdigest() if hasattr(hmac, 'compare_digest'): constant_time_compare = hmac.compare_digest else: def constant_time_compare(first, second): """Returns True if both string inputs are equal, otherwise False. This function should take a constant amount of time regardless of how many characters in the strings match. """ if len(first) != len(second): return False result = 0 for x, y in zip(first, second): result |= ord(x) ^ ord(y) return result == 0 def filter_and_format_resource_metadata(resource_type, resource_list, search_filts, metadata_type=None): """Get all metadata for a list of resources after filtering. Search_filts is a list of dictionaries, where the values in the dictionary can be string or regex string, or a list of strings/regex strings. Let's call a dict a 'filter block' and an item in the dict a 'filter'. A tag is returned if it matches ALL the filters in a filter block. If more than one values are specified for a filter, a tag is returned if it matches ATLEAST ONE value of the filter. If more than one filter blocks are specified, the tag should match ALL the filter blocks. For example: search_filts = [{'key': ['key1', 'key2'], 'value': 'val1'}, {'value': 'val2'}] The filter translates to 'match any tag for which': ((key=key1 AND value=val1) OR (key=key2 AND value=val1)) AND (value=val2) This example filter will never match a tag. :param resource_type: The resource type as a string, e.g. 'instance' :param resource_list: List of resource objects :param search_filts: Filters to filter metadata to be returned. Can be dict (e.g. {'key': 'env', 'value': 'prod'}, or a list of dicts (e.g. [{'key': 'env'}, {'value': 'beta'}]. Note that the values of the dict can be regular expressions. :param metadata_type: Provided to search for a specific metadata type (e.g. 'system_metadata') :returns: List of dicts where each dict is of the form {'key': 'somekey', 'value': 'somevalue', 'instance_id': 'some-instance-uuid-aaa'} if resource_type is 'instance'. 
""" if isinstance(search_filts, dict): search_filts = [search_filts] def _get_id(resource): if resource_type == 'instance': return resource.get('uuid') def _match_any(pattern_list, string): if isinstance(pattern_list, str): pattern_list = [pattern_list] return any([re.match(pattern, string) for pattern in pattern_list]) def _filter_metadata(resource, search_filt, input_metadata): ids = search_filt.get('resource_id', []) keys_filter = search_filt.get('key', []) values_filter = search_filt.get('value', []) output_metadata = {} if ids and _get_id(resource) not in ids: return {} for k, v in six.iteritems(input_metadata): # Both keys and value defined -- AND if (keys_filter and values_filter and not _match_any(keys_filter, k) and not _match_any(values_filter, v)): continue # Only keys or value is defined elif ((keys_filter and not _match_any(keys_filter, k)) or (values_filter and not _match_any(values_filter, v))): continue output_metadata[k] = v return output_metadata formatted_metadata_list = [] for res in resource_list: if resource_type == 'instance': # NOTE(rushiagr): metadata_type should be 'metadata' or # 'system_metadata' if resource_type is instance. Defaulting to # 'metadata' if not specified. if metadata_type is None: metadata_type = 'metadata' metadata = res.get(metadata_type, {}) for filt in search_filts: # By chaining the input to the output, the filters are # ANDed together metadata = _filter_metadata(res, filt, metadata) for (k, v) in metadata.items(): formatted_metadata_list.append({'key': k, 'value': v, '%s_id' % resource_type: _get_id(res)}) return formatted_metadata_list def safe_truncate(value, length): """Safely truncates unicode strings such that their encoded length is no greater than the length provided. """ b_value = encodeutils.safe_encode(value)[:length] # NOTE(chaochin) UTF-8 character byte size varies from 1 to 6. If # truncating a long byte string to 255, the last character may be # cut in the middle, so that UnicodeDecodeError will occur when # converting it back to unicode. decode_ok = False while not decode_ok: try: u_value = encodeutils.safe_decode(b_value) decode_ok = True except UnicodeDecodeError: b_value = b_value[:-1] return u_value def read_cached_file(filename, force_reload=False): """Read from a file if it has been modified. :param force_reload: Whether to reload the file. :returns: A tuple with a boolean specifying if the data is fresh or not. """ global _FILE_CACHE if force_reload: delete_cached_file(filename) reloaded = False mtime = os.path.getmtime(filename) cache_info = _FILE_CACHE.setdefault(filename, {}) if not cache_info or mtime > cache_info.get('mtime', 0): LOG.debug("Reloading cached file %s", filename) with open(filename) as fap: cache_info['data'] = fap.read() cache_info['mtime'] = mtime reloaded = True return (reloaded, cache_info['data']) def delete_cached_file(filename): """Delete cached file if present. :param filename: filename to delete """ global _FILE_CACHE if filename in _FILE_CACHE: del _FILE_CACHE[filename]
Francis-Liu/animated-broccoli
nova/utils.py
Python
apache-2.0
53,116
/* * Copyright (c) 2010-2015, b3log.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.b3log.solo.processor.console; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.b3log.latke.Keys; import org.b3log.latke.logging.Level; import org.b3log.latke.logging.Logger; import org.b3log.latke.service.LangPropsService; import org.b3log.latke.service.ServiceException; import org.b3log.latke.servlet.HTTPRequestContext; import org.b3log.latke.servlet.HTTPRequestMethod; import org.b3log.latke.servlet.annotation.RequestProcessing; import org.b3log.latke.servlet.annotation.RequestProcessor; import org.b3log.latke.servlet.renderer.JSONRenderer; import org.b3log.latke.util.Requests; import org.b3log.solo.model.Preference; import org.b3log.solo.model.Sign; import org.b3log.solo.service.PreferenceMgmtService; import org.b3log.solo.service.PreferenceQueryService; import org.b3log.solo.service.UserQueryService; import org.b3log.solo.util.QueryResults; import org.json.JSONArray; import org.json.JSONObject; /** * Preference console request processing. * * @author <a href="http://88250.b3log.org">Liang Ding</a> * @version 1.0.0.6, May 17, 2013 * @since 0.4.0 */ @RequestProcessor public class PreferenceConsole { /** * Logger. */ private static final Logger LOGGER = Logger.getLogger(PreferenceConsole.class.getName()); /** * Preference query service. */ @Inject private PreferenceQueryService preferenceQueryService; /** * Preference management service. */ @Inject private PreferenceMgmtService preferenceMgmtService; /** * User query service. */ @Inject private UserQueryService userQueryService; /** * Language service. */ @Inject private LangPropsService langPropsService; /** * Preference URI prefix. */ private static final String PREFERENCE_URI_PREFIX = "/console/preference/"; /** * Gets reply template. 
* * <p> * Renders the response with a json object, for example, * <pre> * { * "sc": boolean, * "replyNotificationTemplate": { * "subject": "", * "body": "" * } * } * </pre> * </p> * * @param request the specified http servlet request * @param response the specified http servlet response * @param context the specified http request context * @throws Exception exception */ @RequestProcessing(value = "/console/reply/notification/template", method = HTTPRequestMethod.GET) public void getReplyNotificationTemplate(final HttpServletRequest request, final HttpServletResponse response, final HTTPRequestContext context) throws Exception { if (!userQueryService.isLoggedIn(request, response)) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } final JSONRenderer renderer = new JSONRenderer(); context.setRenderer(renderer); try { final JSONObject replyNotificationTemplate = preferenceQueryService.getReplyNotificationTemplate(); final JSONObject ret = new JSONObject(); renderer.setJSONObject(ret); ret.put(Preference.REPLY_NOTIFICATION_TEMPLATE, replyNotificationTemplate); ret.put(Keys.STATUS_CODE, true); } catch (final Exception e) { LOGGER.log(Level.ERROR, e.getMessage(), e); final JSONObject jsonObject = QueryResults.defaultResult(); renderer.setJSONObject(jsonObject); jsonObject.put(Keys.MSG, langPropsService.get("getFailLabel")); } } /** * Updates reply template. * * @param request the specified http servlet request, for example, * <pre> * { * "replyNotificationTemplate": { * "subject": "", * "body": "" * } * } * </pre> * @param response the specified http servlet response * @param context the specified http request context * @throws Exception exception */ @RequestProcessing(value = "/console/reply/notification/template", method = HTTPRequestMethod.PUT) public void updateReplyNotificationTemplate(final HttpServletRequest request, final HttpServletResponse response, final HTTPRequestContext context) throws Exception { if (!userQueryService.isLoggedIn(request, response)) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } final JSONRenderer renderer = new JSONRenderer(); context.setRenderer(renderer); try { final JSONObject requestJSONObject = Requests.parseRequestJSONObject(request, response); final JSONObject replyNotificationTemplate = requestJSONObject.getJSONObject(Preference.REPLY_NOTIFICATION_TEMPLATE); preferenceMgmtService.updateReplyNotificationTemplate(replyNotificationTemplate); final JSONObject ret = new JSONObject(); ret.put(Keys.STATUS_CODE, true); ret.put(Keys.MSG, langPropsService.get("updateSuccLabel")); renderer.setJSONObject(ret); } catch (final Exception e) { LOGGER.log(Level.ERROR, e.getMessage(), e); final JSONObject jsonObject = QueryResults.defaultResult(); renderer.setJSONObject(jsonObject); jsonObject.put(Keys.MSG, langPropsService.get("updateFailLabel")); } } /** * Gets signs. * * <p> * Renders the response with a json object, for example, * <pre> * { * "sc": boolean, * "signs": [{ * "oId": "", * "signHTML": "" * }, ...] 
* } * </pre> * </p> * * @param request the specified http servlet request * @param response the specified http servlet response * @param context the specified http request context * @throws Exception exception */ @RequestProcessing(value = "/console/signs/", method = HTTPRequestMethod.GET) public void getSigns(final HttpServletRequest request, final HttpServletResponse response, final HTTPRequestContext context) throws Exception { if (!userQueryService.isLoggedIn(request, response)) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } final JSONRenderer renderer = new JSONRenderer(); context.setRenderer(renderer); try { final JSONObject preference = preferenceQueryService.getPreference(); final JSONArray signs = new JSONArray(); final JSONArray allSigns = // includes the empty sign(id=0) new JSONArray(preference.getString(Preference.SIGNS)); for (int i = 1; i < allSigns.length(); i++) { // excludes the empty sign signs.put(allSigns.getJSONObject(i)); } final JSONObject ret = new JSONObject(); renderer.setJSONObject(ret); ret.put(Sign.SIGNS, signs); ret.put(Keys.STATUS_CODE, true); } catch (final Exception e) { LOGGER.log(Level.ERROR, e.getMessage(), e); final JSONObject jsonObject = QueryResults.defaultResult(); renderer.setJSONObject(jsonObject); jsonObject.put(Keys.MSG, langPropsService.get("getFailLabel")); } } /** * Gets preference. * * <p> * Renders the response with a json object, for example, * <pre> * { * "sc": boolean, * "preference": { * "mostViewArticleDisplayCount": int, * "recentCommentDisplayCount": int, * "mostUsedTagDisplayCount": int, * "articleListDisplayCount": int, * "articleListPaginationWindowSize": int, * "mostCommentArticleDisplayCount": int, * "externalRelevantArticlesDisplayCount": int, * "relevantArticlesDisplayCount": int, * "randomArticlesDisplayCount": int, * "blogTitle": "", * "blogSubtitle": "", * "localeString": "", * "timeZoneId": "", * "skinName": "", * "skinDirName": "", * "skins": "[{ * "skinName": "", * "skinDirName": "" * }, ....]", * "noticeBoard": "", * "htmlHead": "", * "adminEmail": "", * "metaKeywords": "", * "metaDescription": "", * "enableArticleUpdateHint": boolean, * "signs": "[{ * "oId": "", * "signHTML": "" * }, ...]", * "allowVisitDraftViaPermalink": boolean, * "version": "", * "articleListStyle": "", // Optional values: "titleOnly"/"titleAndContent"/"titleAndAbstract" * "commentable": boolean, * "feedOutputMode: "" // Optional values: "abstract"/"full" * "feedOutputCnt": int * } * } * </pre> * </p> * * @param request the specified http servlet request * @param response the specified http servlet response * @param context the specified http request context * @throws Exception exception */ @RequestProcessing(value = PREFERENCE_URI_PREFIX, method = HTTPRequestMethod.GET) public void getPreference(final HttpServletRequest request, final HttpServletResponse response, final HTTPRequestContext context) throws Exception { if (!userQueryService.isAdminLoggedIn(request)) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } final JSONRenderer renderer = new JSONRenderer(); context.setRenderer(renderer); try { final JSONObject preference = preferenceQueryService.getPreference(); if (null == preference) { renderer.setJSONObject(QueryResults.defaultResult()); return; } final JSONObject ret = new JSONObject(); renderer.setJSONObject(ret); ret.put(Preference.PREFERENCE, preference); ret.put(Keys.STATUS_CODE, true); } catch (final Exception e) { LOGGER.log(Level.ERROR, e.getMessage(), e); final JSONObject jsonObject = 
QueryResults.defaultResult(); renderer.setJSONObject(jsonObject); jsonObject.put(Keys.MSG, langPropsService.get("getFailLabel")); } } /** * Updates the preference by the specified request. * * @param request the specified http servlet request, for example, * <pre> * { * "preference": { * "mostViewArticleDisplayCount": int, * "recentCommentDisplayCount": int, * "mostUsedTagDisplayCount": int, * "articleListDisplayCount": int, * "articleListPaginationWindowSize": int, * "mostCommentArticleDisplayCount": int, * "externalRelevantArticlesDisplayCount": int, * "relevantArticlesDisplayCount": int, * "randomArticlesDisplayCount": int, * "blogTitle": "", * "blogSubtitle": "", * "skinDirName": "", * "localeString": "", * "timeZoneId": "", * "noticeBoard": "", * "htmlHead": "", * "metaKeywords": "", * "metaDescription": "", * "enableArticleUpdateHint": boolean, * "signs": [{ * "oId": "", * "signHTML": "" * }, ...], * "allowVisitDraftViaPermalink": boolean, * "articleListStyle": "", * "commentable": boolean, * "feedOutputMode: "", * "feedOutputCnt": int * } * }, see {@link org.b3log.solo.model.Preference} for more details * </pre> * @param response the specified http servlet response * @param context the specified http request context * @throws Exception exception */ @RequestProcessing(value = PREFERENCE_URI_PREFIX, method = HTTPRequestMethod.PUT) public void updatePreference(final HttpServletRequest request, final HttpServletResponse response, final HTTPRequestContext context) throws Exception { if (!userQueryService.isAdminLoggedIn(request)) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } final JSONRenderer renderer = new JSONRenderer(); context.setRenderer(renderer); try { final JSONObject requestJSONObject = Requests.parseRequestJSONObject(request, response); final JSONObject preference = requestJSONObject.getJSONObject(Preference.PREFERENCE); final JSONObject ret = new JSONObject(); renderer.setJSONObject(ret); if (isInvalid(preference, ret)) { return; } preferenceMgmtService.updatePreference(preference); ret.put(Keys.STATUS_CODE, true); ret.put(Keys.MSG, langPropsService.get("updateSuccLabel")); } catch (final ServiceException e) { LOGGER.log(Level.ERROR, e.getMessage(), e); final JSONObject jsonObject = QueryResults.defaultResult(); renderer.setJSONObject(jsonObject); jsonObject.put(Keys.MSG, e.getMessage()); } } /** * Checks whether the specified preference is invalid and sets the specified response object. 
* * @param preference the specified preference * @param responseObject the specified response object * @return {@code true} if the specified preference is invalid, returns {@code false} otherwise */ private boolean isInvalid(final JSONObject preference, final JSONObject responseObject) { responseObject.put(Keys.STATUS_CODE, false); final StringBuilder errMsgBuilder = new StringBuilder('[' + langPropsService.get("paramSettingsLabel")); errMsgBuilder.append(" - "); String input = preference.optString(Preference.EXTERNAL_RELEVANT_ARTICLES_DISPLAY_CNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("externalRelevantArticlesDisplayCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.RELEVANT_ARTICLES_DISPLAY_CNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("relevantArticlesDisplayCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.RANDOM_ARTICLES_DISPLAY_CNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("randomArticlesDisplayCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.MOST_COMMENT_ARTICLE_DISPLAY_CNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("indexMostCommentArticleDisplayCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.MOST_VIEW_ARTICLE_DISPLAY_CNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("indexMostViewArticleDisplayCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.RECENT_COMMENT_DISPLAY_CNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("indexRecentCommentDisplayCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.MOST_USED_TAG_DISPLAY_CNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("indexTagDisplayCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.ARTICLE_LIST_DISPLAY_COUNT); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("pageSizeLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.ARTICLE_LIST_PAGINATION_WINDOW_SIZE); if (!isNonNegativeInteger(input)) { errMsgBuilder.append(langPropsService.get("windowSizeLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } input = preference.optString(Preference.FEED_OUTPUT_CNT); if (!isNonNegativeInteger(input)) { 
errMsgBuilder.append(langPropsService.get("feedOutputCntLabel")).append("] ").append( langPropsService.get("nonNegativeIntegerOnlyLabel")); responseObject.put(Keys.MSG, errMsgBuilder.toString()); return true; } return false; } /** * Checks whether the specified input is a non-negative integer. * * @param input the specified input * @return {@code true} if it is, returns {@code false} otherwise */ private boolean isNonNegativeInteger(final String input) { try { return 0 <= Integer.valueOf(input); } catch (final Exception e) { return false; } } }
sitexa/solo
core/src/main/java/org/b3log/solo/processor/console/PreferenceConsole.java
Java
apache-2.0
19,367
Tutorial
--------

Python uses two kinds of loops: "while" and "for".

### The "for" loop

The for loop "runs through" a given sequence of numbers. For example:

    pierwsze = [2,3,5,7]
    for pierwsza in pierwsze:
        print pierwsza

A for loop can also be built with the range and xrange functions. The difference between them is that range returns a new list of the numbers in the given interval, whereas xrange returns a single number at a time, which makes it more efficient in loops. Python 3.x uses a range function that behaves like xrange. When returning numbers, xrange starts from 0.

    # Prints the numbers 0 1 2 3 4
    for x in xrange(5):
        print x,
    print

    # Prints 3 4 5
    for x in xrange(3,6):
        print x,

### The "while" loop

A while loop repeats as long as a given boolean condition is satisfied. For example:

    # Prints 0 1 2 3 4
    licznik = 0
    while licznik < 5:
        print licznik,
        licznik += 1  # This has the same effect as licznik = licznik + 1

### The "break" and "continue" statements

`break` is used to end a for or while loop, while `continue` skips the rest of the current block and returns to the loop header. A few examples:

    # Prints 0 1 2 3 4
    licznik = 0
    while True:
        print licznik,
        licznik += 1
        if licznik >= 5:
            break
    print

    # Prints only the odd numbers - 1 3 5 7 9
    for x in xrange(10):
        # Check whether x is even
        if x % 2 == 0:
            continue
        print x,

### Exercise

Using a loop, print all the **even** numbers from the liczby list, in the same order in which they are stored in it. Do not print any number that appears after the number 237 in that list.

Tutorial Code
-------------

    liczby = [
        951, 402, 984, 651, 360, 69, 408, 319, 601, 485,
        980, 507, 725, 547, 544, 615, 83, 165, 141, 501,
        263, 617, 865, 575, 219, 390, 984, 592, 236, 105,
        942, 941, 386, 462, 47, 418, 907, 344, 236, 375,
        823, 566, 597, 978, 328, 615, 953, 345, 399, 162,
        758, 219, 918, 237, 412, 566, 826, 248, 866, 950,
        626, 949, 687, 217, 815, 67, 104, 58, 512, 24,
        892, 894, 767, 553, 81, 379, 843, 831, 445, 742,
        717, 958, 609, 842, 451, 688, 753, 854, 685, 93,
        857, 440, 380, 126, 721, 328, 753, 470, 743, 527
    ]

    # your code goes here

Expected Output
---------------

    402 984 360 408 980 544 390 984 592 236 942 386 462 418 344 236 566 978 328 162 758 918

Solution
--------
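The solution body is empty in this record; below is a minimal sketch of one possible answer, written with the same Python 2 print semantics as the tutorial examples above and not taken from the original solution.

    # Illustrative solution sketch (not from the original record):
    # walk the list in order, stop when 237 is reached, and print only the even numbers
    for liczba in liczby:
        if liczba == 237:
            break
        if liczba % 2 == 0:
            print liczba,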
GenericSnake/interactive-tutorials
tutorials/learnpython.org/pl/Petle.md
Markdown
apache-2.0
2,644
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jmeter.report.processor.graph.impl; import java.util.Collections; import java.util.Map; import org.apache.jmeter.report.processor.MapResultData; import org.apache.jmeter.report.processor.SumAggregatorFactory; import org.apache.jmeter.report.processor.ValueResultData; import org.apache.jmeter.report.processor.graph.AbstractGraphConsumer; import org.apache.jmeter.report.processor.graph.AbstractOverTimeGraphConsumer; import org.apache.jmeter.report.processor.graph.CountValueSelector; import org.apache.jmeter.report.processor.graph.GraphKeysSelector; import org.apache.jmeter.report.processor.graph.GroupInfo; import org.apache.jmeter.report.processor.graph.NameSeriesSelector; /** * The class ResponseTimeDistributionGraphConsumer provides a graph to visualize * the distribution of the average response time per sample * * @since 3.0 */ public class ResponseTimeDistributionGraphConsumer extends AbstractGraphConsumer { private long granularity = 1L; /** * Gets the granularity. * * @return the granularity */ public final long getGranularity() { return granularity; } /** * @param granularity the granularity to set */ public final void setGranularity(long granularity) { this.granularity = granularity; } /* * (non-Javadoc) * * @see org.apache.jmeter.report.csv.processor.impl.AbstractGraphConsumer# * createKeysSelector() */ @Override protected final GraphKeysSelector createKeysSelector() { return sample -> { long elapsed = sample.getElapsedTime(); return (double) elapsed - elapsed % granularity; }; } /* * (non-Javadoc) * * @see org.apache.jmeter.report.csv.processor.impl.AbstractGraphConsumer# * createGroupInfos() */ @Override protected Map<String, GroupInfo> createGroupInfos() { return Collections.singletonMap( AbstractGraphConsumer.DEFAULT_GROUP, new GroupInfo( new SumAggregatorFactory(), new NameSeriesSelector(), // We include Transaction Controller results new CountValueSelector(false), false, false)); } /* * (non-Javadoc) * * @see org.apache.jmeter.report.processor.graph.AbstractGraphConsumer# * initializeExtraResults(org.apache.jmeter.report.processor.MapResultData) */ @Override protected void initializeExtraResults(MapResultData parentResult) { parentResult.setResult( AbstractOverTimeGraphConsumer.RESULT_CTX_GRANULARITY, new ValueResultData(granularity)); } }
benbenw/jmeter
src/core/src/main/java/org/apache/jmeter/report/processor/graph/impl/ResponseTimeDistributionGraphConsumer.java
Java
apache-2.0
3,522
val x: Int => String = null x(1)(<caret>) //
loskutov/intellij-scala
testdata/parameterInfo/functionParameterInfo/currings/NoCurrings.scala
Scala
apache-2.0
44
define(['modules/forms/module','bootstrap-validator'], function(module){
    "use strict";

    module.registerDirective('bootstrapProfileForm', function(){
        return {
            restrict: 'E',
            replace: true,
            templateUrl: 'app/modules/forms/directives/bootstrap-validation/bootstrap-profile-form.tpl.html',
            link: function(scope, form){
                form.bootstrapValidator({
                    feedbackIcons : {
                        valid : 'glyphicon glyphicon-ok',
                        invalid : 'glyphicon glyphicon-remove',
                        validating : 'glyphicon glyphicon-refresh'
                    },
                    fields : {
                        email : {
                            validators : {
                                notEmpty : {
                                    message : 'The email address is required'
                                },
                                emailAddress : {
                                    message : 'The email address is not valid'
                                }
                            }
                        },
                        password : {
                            validators : {
                                notEmpty : {
                                    message : 'The password is required'
                                }
                            }
                        }
                    }
                });
            }
        };
    });
});
Orpheus11/nile
web/app/modules/forms/directives/bootstrap-validation/bootstrapProfileForm.js
JavaScript
apache-2.0
1,532
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Simple Leaf-Spine Network application. */ package org.onosproject.simplefabric;
osinstom/onos
apps/simplefabric/src/main/java/org/onosproject/simplefabric/package-info.java
Java
apache-2.0
707
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.tests.indexer; import org.apache.druid.java.util.common.Pair; import org.apache.druid.testing.guice.DruidTestModuleFactory; import org.apache.druid.tests.TestNGGroup; import org.testng.annotations.Guice; import org.testng.annotations.Test; import java.util.List; /** * IMPORTANT: * To run this test, you must: * 1) Set the bucket and path for your data. This can be done by setting -Ddruid.test.config.cloudBucket and * -Ddruid.test.config.cloudPath or setting "cloud_bucket" and "cloud_path" in the config file. * 2) Copy wikipedia_index_data1.json, wikipedia_index_data2.json, and wikipedia_index_data3.json * located in integration-tests/src/test/resources/data/batch_index/json to your S3 at the location set in step 1. * 3) Provide -Doverride.config.path=<PATH_TO_FILE> with s3 credentials/configs set. See * integration-tests/docker/environment-configs/override-examples/s3 for env vars to provide. */ @Test(groups = TestNGGroup.S3_DEEP_STORAGE) @Guice(moduleFactory = DruidTestModuleFactory.class) public class ITS3ToS3ParallelIndexTest extends AbstractS3InputSourceParallelIndexTest { @Test(dataProvider = "resources") public void testS3IndexData(Pair<String, List> s3InputSource) throws Exception { doTest(s3InputSource, new Pair<>(false, false)); } }
nishantmonu51/druid
integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToS3ParallelIndexTest.java
Java
apache-2.0
2,128
/* * ModeShape (http://www.modeshape.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.infinispan.schematic.internal.document; import static org.infinispan.schematic.document.Json.ReservedField.BASE_64; import static org.infinispan.schematic.document.Json.ReservedField.BINARY_TYPE; import static org.infinispan.schematic.document.Json.ReservedField.CODE; import static org.infinispan.schematic.document.Json.ReservedField.DATE; import static org.infinispan.schematic.document.Json.ReservedField.INCREMENT; import static org.infinispan.schematic.document.Json.ReservedField.OBJECT_ID; import static org.infinispan.schematic.document.Json.ReservedField.REGEX_OPTIONS; import static org.infinispan.schematic.document.Json.ReservedField.REGEX_PATTERN; import static org.infinispan.schematic.document.Json.ReservedField.SCOPE; import static org.infinispan.schematic.document.Json.ReservedField.TIMESTAMP; import static org.infinispan.schematic.document.Json.ReservedField.UUID; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; import java.net.URL; import java.nio.charset.Charset; import java.text.CharacterIterator; import java.text.ParseException; import java.text.StringCharacterIterator; import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import org.infinispan.schematic.Base64; import org.infinispan.schematic.document.Bson.BinaryType; import org.infinispan.schematic.document.Document; import org.infinispan.schematic.document.DocumentSequence; import org.infinispan.schematic.document.Immutable; import org.infinispan.schematic.document.Json; import org.infinispan.schematic.document.NotThreadSafe; import org.infinispan.schematic.document.Null; import org.infinispan.schematic.document.ParsingException; import org.infinispan.schematic.document.ThreadSafe; /** * A class that reads the <a href="http://www.json.org/">JSON</a> data format and constructs an in-memory <a * href="http://bsonspec.org/">BSON</a> representation. * <p> * This reader is capable of optionally introspecting string values to look for certain string patterns that are commonly used to * represent dates. 
In introspection is not done by default, but when it is used it looks for the following patterns: * <ul> * <li>a string literal date of the form <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>"</code> where * <code>T</code> is a literal character</li> * <li>a string literal date of the form <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>Z"</code> where * <code>T</code> and <code>Z</code> are literal characters</li> * <li>a string literal date of the form * <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>GMT+<i>00</i>:<i>00</i>"</code> where <code>T</code>, and * <code>GMT</code> are literal characters</li> * <li>a string literal date of the form <code>"/Date(<i>millisOrIso</i>)/"</code></li> * <li>a string literal date of the form <code>"\/Date(<i>millisOrIso</i>)\/"</code></li> * </ul> * Note that in the date forms listed above, <code><i>millisOrIso</i></code> is either a long value representing the number of * milliseconds since epoch or a string literal in ISO-8601 format representing a date and time. * </p> * <p> * This reader also accepts non-string values that are function calls of the form * * <pre> * new <i>functionName</i>(<i>parameters</i>) * </pre> * * where <code><i>parameters</i></code> consists of one or more JSON values (including nested functions). If the function call * cannot be parsed and executed, the string literal form of the function call is kept. * </p> * * @author Randall Hauch <rhauch@redhat.com> (C) 2011 Red Hat Inc. * @since 5.1 */ @ThreadSafe @Immutable public class JsonReader { protected static final DocumentValueFactory VALUE_FACTORY = new DefaultDocumentValueFactory(); protected static final ValueMatcher SIMPLE_VALUE_MATCHER = new SimpleValueMatcher(VALUE_FACTORY); protected static final ValueMatcher DATE_VALUE_MATCHER = new DateValueMatcher(VALUE_FACTORY); public static final boolean DEFAULT_INTROSPECT = true; /** * Read the JSON representation from supplied URL and construct the {@link Document} representation, using the * {@link Charset#defaultCharset() default character set}. * * @param url the URL to the JSON document; may not be null and must be resolvable * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the URL */ public Document read( URL url ) throws ParsingException { try { return read(url.openStream(), DEFAULT_INTROSPECT); } catch (IOException e) { throw new ParsingException(e.getMessage(), e, 0, 0); } } /** * Read the JSON representation from supplied input stream and construct the {@link Document} representation, using the * {@link Charset#defaultCharset() default character set}. * * @param stream the input stream; may not be null * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( InputStream stream ) throws ParsingException { return read(stream, DEFAULT_INTROSPECT); } /** * Read the JSON representation from supplied input stream and construct the {@link Document} representation, using the * supplied {@link Charset character set}. 
* * @param stream the input stream; may not be null * @param charset the character set that should be used; may not be null * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( InputStream stream, Charset charset ) throws ParsingException { return read(stream, charset, DEFAULT_INTROSPECT); } /** * Read the JSON representation from supplied input stream and construct the {@link Document} representation. * * @param reader the IO reader; may not be null * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( Reader reader ) throws ParsingException { return read(reader, DEFAULT_INTROSPECT); } /** * Read the JSON representation from supplied string and construct the {@link Document} representation. * * @param json the JSON representation; may not be null * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( String json ) throws ParsingException { return read(json, DEFAULT_INTROSPECT); } /** * Read the JSON representation from supplied input stream and construct the {@link Document} representation, using the * {@link Charset#defaultCharset() default character set}. * * @param stream the input stream; may not be null * @param introspectStringValues true if the string values should be examined for common patterns, or false otherwise * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( InputStream stream, boolean introspectStringValues ) throws ParsingException { return read(stream, Json.UTF8, introspectStringValues); } /** * Read the JSON representation from supplied input stream and construct the {@link Document} representation, using the * supplied {@link Charset character set}. * * @param stream the input stream; may not be null * @param charset the character set that should be used; may not be null * @param introspectStringValues true if the string values should be examined for common patterns, or false otherwise * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( InputStream stream, Charset charset, boolean introspectStringValues ) throws ParsingException { return read(new InputStreamReader(stream, charset), introspectStringValues); } /** * Read the JSON representation from supplied input stream and construct the {@link Document} representation. * * @param reader the IO reader; may not be null * @param introspectStringValues true if the string values should be examined for common patterns, or false otherwise * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( Reader reader, boolean introspectStringValues ) throws ParsingException { // Create an object so that this reader is thread safe ... ValueMatcher matcher = introspectStringValues ? DATE_VALUE_MATCHER : SIMPLE_VALUE_MATCHER; return new Parser(new Tokenizer(reader), VALUE_FACTORY, matcher).parseDocument(); } /** * Read the JSON representation from supplied string and construct the {@link Document} representation. 
* * @param json the JSON representation; may not be null * @param introspectStringValues true if the string values should be examined for common patterns, or false otherwise * @return the in-memory {@link Document} representation * @throws ParsingException if there was a problem reading from the stream */ public Document read( String json, boolean introspectStringValues ) throws ParsingException { return read(new StringReader(json), introspectStringValues); } /** * Return a {@link DocumentSequence} that can be used to pull multiple documents from the stream. * * @param stream the input stream; may not be null * @return the sequence that can be used to get one or more Document instances from a single input */ public DocumentSequence readMultiple( InputStream stream ) { return readMultiple(stream, DEFAULT_INTROSPECT); } /** * Return a {@link DocumentSequence} that can be used to pull multiple documents from the stream. * * @param stream the input stream; may not be null * @param introspectStringValues true if the string values should be examined for common patterns, or false otherwise * @return the sequence that can be used to get one or more Document instances from a single input */ public DocumentSequence readMultiple( InputStream stream, boolean introspectStringValues ) { return readMultiple(new InputStreamReader(stream, Json.UTF8), introspectStringValues); } /** * Return a {@link DocumentSequence} that can be used to pull multiple documents from the stream. * * @param reader the IO reader; may not be null * @return the sequence that can be used to get one or more Document instances from a single input */ public DocumentSequence readMultiple( Reader reader ) { return readMultiple(reader, DEFAULT_INTROSPECT); } /** * Return a {@link DocumentSequence} that can be used to pull multiple documents from the stream. * * @param reader the IO reader; may not be null * @param introspectStringValues true if the string values should be examined for common patterns, or false otherwise * @return the sequence that can be used to get one or more Document instances from a single input */ public DocumentSequence readMultiple( Reader reader, boolean introspectStringValues ) { // Create an object so that this reader is thread safe ... final Tokenizer tokenizer = new Tokenizer(reader); ValueMatcher matcher = introspectStringValues ? DATE_VALUE_MATCHER : SIMPLE_VALUE_MATCHER; final Parser parser = new Parser(tokenizer, VALUE_FACTORY, matcher); return new DocumentSequence() { @Override public Document nextDocument() throws ParsingException { if (tokenizer.isFinished()) return null; Document doc = parser.parseDocument(false); // System.out.println(Json.writePretty(doc)); return doc; } }; } /** * Parse the number represented by the supplied (unquoted) JSON field value. * * @param value the string representation of the value * @return the number, or null if the value could not be parsed */ public static Number parseNumber( String value ) { // Try to parse as a number ... char c = value.charAt(0); if ((c >= '0' && c <= '9') || c == '.' || c == '-' || c == '+') { // It's definitely a number ... if (c == '0' && value.length() > 2) { // it might be a hex number that starts with '0x' char two = value.charAt(1); if (two == 'x' || two == 'X') { try { // Parse the remainder of the hex number ... int integer = Integer.parseInt(value.substring(2), 16); return new Integer(integer); } catch (NumberFormatException e) { // Ignore and continue ... } } } // Try parsing as a double ... 
try { if ((value.indexOf('.') > -1) || (value.indexOf('E') > -1) || (value.indexOf('e') > -1)) { return Double.parseDouble(value); } Long longObj = new Long(value); long longValue = longObj.longValue(); int intValue = longObj.intValue(); if (longValue == intValue) { // Then it's just an integer ... return new Integer(intValue); } return longObj; } catch (NumberFormatException e) { // ignore ... } } return null; } /** * The component that parses a tokenized JSON stream. * * @author Randall Hauch <rhauch@redhat.com> (C) 2011 Red Hat Inc. * @since 5.1 */ @NotThreadSafe public static class Parser { private final DocumentValueFactory values; private final Tokenizer tokens; private final ValueMatcher valueMatcher; /** * Create a new JsonReader that uses the supplied {@link Tokenizer} instance. * * @param tokenizer the tokenizer that tokenizes the stream of JSON content; may not be null * @param values the factory for creating value objects; may not be null * @param valueMatcher the component that looks for patterns within string values to create alternative objects; may not * be null */ public Parser( Tokenizer tokenizer, DocumentValueFactory values, ValueMatcher valueMatcher ) { this.tokens = tokenizer; this.values = values; this.valueMatcher = valueMatcher; } /** * Parse the stream for the next JSON document. * * @return the document, or null if there are no more documents * @throws ParsingException if there is a problem parsing the value */ public Document parseDocument() throws ParsingException { return parseDocument(null, true); } /** * Parse the stream for the next JSON document. * * @param failIfNotValidDocument true if this method should throw an exception if the stream does not contain a valid * document, or false if null should be returned if there is no valid document on the stream * @return the document, or null if there are no more documents * @throws ParsingException if there is a problem parsing the value */ public Document parseDocument( boolean failIfNotValidDocument ) throws ParsingException { return parseDocument(null, failIfNotValidDocument); } protected BasicDocument newDocument() { return new BasicDocument(); } /** * Parse the stream for the next JSON document. * * @param hasReservedFieldNames the flag that should be set if this document contains field names that are reserved * @param failIfNotValidDocument true if this method should throw an exception if the stream does not contain a valid * document, or false if null should be returned if there is no valid document on the stream * @return the document, or null if there are no more documents * @throws ParsingException if there is a problem parsing the value */ protected Document parseDocument( AtomicBoolean hasReservedFieldNames, boolean failIfNotValidDocument ) throws ParsingException { if (tokens.nextUsefulChar() != '{') { if (failIfNotValidDocument) { throw tokens.error("JSON documents must begin with a '{' character"); } // otherwise just return ... return null; } BasicDocument doc = newDocument(); do { String fieldName = null; // Peek at the next character on the stream ... switch (tokens.peek()) { case 0: throw tokens.error("JSON documents must end with a '}' character"); case '}': tokens.next(); return doc; default: // This should be a field name, so read it ... fieldName = tokens.nextString(); break; } // Now look for any of the following delimieters: ':', "->", or "=>" tokens.nextFieldDelim(); // Now look for a value ... 
Object value = parseValue(); doc.put(fieldName, value); // Determine if this field is a reserved if (hasReservedFieldNames != null && isReservedFieldName(fieldName)) { hasReservedFieldNames.set(true); } // Look for the delimiter between fields ... if (tokens.nextDocumentDelim()) return doc; } while (true); } protected final boolean isReservedFieldName( String fieldName ) { return fieldName.length() != 0 && fieldName.charAt(0) == '$'; } /** * Parse the JSON array on the stream, beginning with the '[' character until the ']' character, which is consumed. * * @return the array representation; never null but possibly an empty array * @throws ParsingException if there is a problem parsing the value */ public BasicArray parseArray() throws ParsingException { if (tokens.nextUsefulChar() != '[') { throw tokens.error("JSON arrays must begin with a '[' character"); } BasicArray array = new BasicArray(); boolean expectValueSeparator = false; do { // Peek at the next character on the stream ... char c = tokens.peek(); switch (c) { case 0: throw tokens.error("JSON arrays must end with a ']' character"); case ']': tokens.next(); return array; case ',': tokens.next(); expectValueSeparator = false; break; default: if (expectValueSeparator) { throw tokens.error("Invalid character in JSON array: '" + c + "' at line " + tokens.lineNumber() + " column " + tokens.columnNumber()); } // This should be a value .. Object value = parseValue(); array.addValue(value); expectValueSeparator = true; break; } } while (true); } /** * Parse the stream for the next field value, which can be one of the following values: * <ul> * <li>a nested document</li> * <li>an array of values</li> * <li>a string literal, surrounded by single-quote characters</li> * <li>a string literal, surrounded by double-quote characters</li> * <li>a string literal date of the form <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>"</code> * where <code>T</code> is a literal character</li> * <li>a string literal date of the form <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>Z"</code> * where <code>T</code> and <code>Z</code> are literal characters</li> * <li>a string literal date of the form * <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>GMT+<i>00</i>:<i>00</i>"</code> where * <code>T</code>, and <code>GMT</code> are literal characters</li> * <li>a string literal date of the form <code>"/Date(<i>millisOrIso</i>)/"</code></li> * <li>a string literal date of the form <code>"\/Date(<i>millisOrIso</i>)\/"</code></li> * <li>a date literal of the form <code>new Date(<i>millisOrIso</i>)</code></li> * <li>a date literal of the form <code>Date(<i>millisOrIso</i>)</code></li> * <li>a function of the form <code>new <i>functionName</i>(<i>parameters</i>)</code> where <code><i>parameters</i></code> * consists of one or more values as parsed by this method * </ul> * Note that in the date forms listed above, <code><i>millisOrIso</i></code> is either a long value representing the * number of milliseconds since epoch or a string literal in ISO-8601 format representing a date and time. * * @return the field value * @throws ParsingException if there is a problem parsing the value */ public Object parseValue() throws ParsingException { char c = tokens.peek(); switch (c) { case 0: // There's nothing left ... return null; case '{': // Nested object ... 
AtomicBoolean hasReservedFieldNames = new AtomicBoolean(); Document doc = parseDocument(hasReservedFieldNames, true); if (!hasReservedFieldNames.get()) { return doc; } // Convert the doc with reserved field names ... return processDocumentWithReservedFieldNames(doc); case '[': // Nested array ... return parseArray(); case '"': case '\'': String literal = tokens.nextString(); Object value = valueMatcher.parseValue(literal); return value != null ? value : literal; case 'd': case 'n': String newToken = tokens.nextWord(); // read the 'new' token if ("new".equalsIgnoreCase(newToken) || "date".equalsIgnoreCase(newToken)) { return parseFunction(); } break; } // Looks like it's a number, so try that ... String number = tokens.nextNumber(); return number != null ? parseValue(number, tokens.lineNumber(), tokens.columnNumber()) : number; } /** * Parse the value given by the supplied string located at the supplied line and column numbers. This method looks for * known constant values, then attempts to parse the value as a number, and then calls * {@link #parseUnknownValue(String, int, int)}. * * @param value the string representation of the value * @param lineNumber the line number for the beginning of the value * @param columnNumber the column number for the beginning of the value * @return the value * @throws ParsingException if there is a problem parsing the value */ public Object parseValue( String value, int lineNumber, int columnNumber ) throws ParsingException { if (value.length() == 0) return value; if ("true".equalsIgnoreCase(value)) return Boolean.TRUE; if ("false".equalsIgnoreCase(value)) return Boolean.FALSE; if ("null".equalsIgnoreCase(value)) return Null.getInstance(); // Try to parse as a number ... Number number = parseNumber(value); if (number != null) return number; return parseUnknownValue(value, lineNumber, columnNumber); } /** * Parse the number represented by the supplied value. This method is called by the {@link #parseValue(String, int, int)} * method. * * @param value the string representation of the value * @return the number, or null if the value could not be parsed */ protected Number parseNumber( String value ) { return JsonReader.parseNumber(value); } /** * Override this method if custom value types are expected. * * @param value the string representation of the value * @param lineNumber the line number at which the value starts * @param columnNumber the column number at which the value starts * @return the value * @throws ParsingException if there is a problem parsing the value */ protected Object parseUnknownValue( String value, int lineNumber, int columnNumber ) throws ParsingException { return value; } /** * Parse a function call on the stream. The 'new' keyword has already been processed. * * @return the result of the evaluation of the function * @throws ParsingException if there is a problem parsing the value */ public Object parseFunction() throws ParsingException { // Parse the function name ... int line = tokens.lineNumber(); int col = tokens.columnNumber(); String functionName = tokens.nextString(); FunctionCall function = new FunctionCall(functionName, line, col); // Read the open parenthesis .. char c = tokens.nextUsefulChar(); if (c != '(') { throw tokens.error("Expected '(' after function name \"" + functionName + "\" and at line " + tokens.lineNumber() + ", column " + tokens.columnNumber()); } // Read the parameters ... 
do { line = tokens.lineNumber(); col = tokens.columnNumber(); Object parameter = parseValue(); if (parameter == null) { break; } function.add(parameter, line, col); } while (true); // Now evaluate the function ... Object value = evaluateFunction(function); return value != null ? value : evaluateUnknownFunction(function); } /** * Method that is called to evaluate the supplied function. This method may be overridden by subclasses to handle custom * functions. * * @param function the function definition * @return the value that resulted from evaluating the function, or null if the function call could not be evaluated * @throws ParsingException if there is a problem parsing the value */ public Object evaluateFunction( FunctionCall function ) throws ParsingException { int numParams = function.size(); if ("date".equalsIgnoreCase(function.getFunctionName())) { if (numParams > 0) { // The parameter should be a long or a timestamp ... FunctionParameter param1 = function.get(0); Object value = param1.getValue(); if (value instanceof Long) { Long millis = (Long)value; return values.createDate(millis.longValue()); } if (value instanceof Integer) { Integer millis = (Integer)value; return values.createDate(millis.longValue()); } if (value instanceof String) { String valueStr = (String)value; try { return values.createDate(valueStr); } catch (ParseException e) { // Not a valid date ... throw tokens.error("Expecting the \"new Date(...)\" parameter to be a valid number of milliseconds or ISO date string, but found \"" + param1.getValue() + "\" at line " + param1.getLineNumber() + ", column " + param1.getColumnNumber()); } } } // Not a valid date ... throw tokens.error("The date function requires one parameter at line " + function.getLineNumber() + ", column " + function.getColumnNumber()); } return null; } /** * Method that is called when the function call described by the parameter could not be evaluated. By default, the string * representation of the function is returned. 
* * @param function the function definition * @return the value that resulted from evaluating the function * @throws ParsingException if there is a problem parsing the value */ protected Object evaluateUnknownFunction( FunctionCall function ) throws ParsingException { return function.toString(); } @SuppressWarnings( "deprecation" ) protected Object processDocumentWithReservedFieldNames( Document doc ) { if (doc == null) return null; Object value = null; int numFields = doc.size(); if (numFields == 0) return doc; try { if (numFields == 1) { if (!Null.matches(value = doc.get(OBJECT_ID))) { String bytesInBase16 = value.toString(); return values.createObjectId(bytesInBase16); } if (!Null.matches(value = doc.get(DATE))) { if (value instanceof Date) { return value; } String isoDate = value.toString(); try { return values.createDate(isoDate); } catch (ParseException e) { Long millis = Long.parseLong(isoDate); return values.createDate(millis); } } if (!Null.matches(value = doc.get(REGEX_PATTERN))) { String pattern = value.toString(); return values.createRegex(pattern, null); } if (!Null.matches(value = doc.get(UUID))) { return values.createUuid(value.toString()); } if (!Null.matches(value = doc.get(CODE))) { String code = value.toString(); return values.createCode(code); } } else if (numFields == 2) { if (!Null.matches(value = doc.get(TIMESTAMP))) { int time = doc.getInteger(TIMESTAMP); int inc = doc.getInteger(INCREMENT); return values.createTimestamp(time, inc); } if (!Null.matches(value = doc.get(REGEX_PATTERN))) { String pattern = value.toString(); String options = doc.getString(REGEX_OPTIONS); return values.createRegex(pattern, options); } if (!Null.matches(value = doc.get(CODE))) { String code = value.toString(); Document scope = doc.getDocument(SCOPE); return scope != null ? 
values.createCode(code, scope) : values.createCode(code); } if (!Null.matches(value = doc.get(BINARY_TYPE))) { char c = value.toString().charAt(0); byte type = 0x00; switch (c) { case '0': type = BinaryType.GENERAL; break; case '1': type = BinaryType.FUNCTION; break; case '2': type = BinaryType.BINARY; break; case '3': type = BinaryType.UUID; break; case '5': type = BinaryType.MD5; break; case '8': c = value.toString().charAt(1); if (c == '0') { type = BinaryType.USER_DEFINED; } break; } String data = doc.getString(BASE_64); return values.createBinary(type, Base64.decode(data)); } } } catch (Throwable e) { // ignore } return doc; } protected static class FunctionCall implements Iterable<FunctionParameter> { private final String functionName; private final List<FunctionParameter> parameters = new LinkedList<FunctionParameter>(); private final int lineNumber; private final int columnNumber; public FunctionCall( String functionName, int lineNumber, int columnNumber ) { this.functionName = functionName; this.lineNumber = lineNumber; this.columnNumber = columnNumber; } public String getFunctionName() { return functionName; } public void add( Object parameter, int lineNumber, int columnNumber ) { this.parameters.add(new FunctionParameter(parameter, lineNumber, columnNumber)); } @Override public Iterator<FunctionParameter> iterator() { return parameters.iterator(); } public FunctionParameter get( int index ) { return parameters.get(index); } public int size() { return parameters.size(); } public int getLineNumber() { return lineNumber; } public int getColumnNumber() { return columnNumber; } @Override public String toString() { StringBuilder sb = new StringBuilder(functionName); sb.append('('); boolean first = true; for (FunctionParameter parameter : parameters) { if (first) { first = false; } else { sb.append(','); } sb.append(parameter.getValue()); } sb.append(')'); return sb.toString(); } } @Immutable protected static class FunctionParameter { private final Object value; private final int lineNumber; private final int columnNumber; public FunctionParameter( Object value, int lineNumber, int columnNumber ) { this.value = value; this.lineNumber = lineNumber; this.columnNumber = columnNumber; } public Object getValue() { return value; } public int getLineNumber() { return lineNumber; } public int getColumnNumber() { return columnNumber; } @Override public String toString() { return Json.write(value); } } } /** * The component that matches a string value for certain patterns. If the value matches a known pattern, it return the * appropriate value object; otherwise, the supplied string value is returned. * * @author Randall Hauch <rhauch@redhat.com> (C) 2011 Red Hat Inc. * @since 5.1 */ @NotThreadSafe public static interface ValueMatcher { /** * Parse the value given by the supplied string into an appropriate value object. This method looks for specific patterns * of Date strings; if no known pattern is found, it just returns the supplied value. * * @param value the string representation of the value * @return the value */ public Object parseValue( String value ); } /** * The component that matches a string value for certain patterns. If the value matches a known pattern, it return the * appropriate value object; otherwise, the supplied string value is returned. * * @author Randall Hauch <rhauch@redhat.com> (C) 2011 Red Hat Inc. 
* @since 5.1 */ @NotThreadSafe public static class SimpleValueMatcher implements ValueMatcher { protected final DocumentValueFactory values; /** * Create a new matcher that uses the supplied {@link DocumentValueFactory} instance. * * @param values the factory for creating value objects; may not be null */ public SimpleValueMatcher( DocumentValueFactory values ) { this.values = values; } /** * Parse the value given by the supplied string into an appropriate value object. This method looks for specific patterns * of Date strings; if no known pattern is found, it just returns the supplied value. * * @param value the string representation of the value * @return the value */ @Override public Object parseValue( String value ) { return value; } } /** * The component that parses a tokenized JSON stream and attempts to evaluate literal values such as dates * * @author Randall Hauch <rhauch@redhat.com> (C) 2011 Red Hat Inc. * @since 5.1 */ @NotThreadSafe public static class DateValueMatcher extends SimpleValueMatcher { /** * Create a new matcher that uses the supplied {@link DocumentValueFactory} instance. * * @param values the factory for creating value objects; may not be null */ public DateValueMatcher( DocumentValueFactory values ) { super(values); } /** * Parse the value given by the supplied string into an appropriate value object. This method looks for specific patterns * of Date strings; if no known pattern is found, it just returns the supplied value. * * @param value the string representation of the value * @return the value */ @Override public Object parseValue( String value ) { if (value != null) { if (value.length() > 2) { Date date = parseDateFromLiteral(value); if (date != null) { return date; } } // Unescape escaped characters ... value = unescapeValue(value); } return value; } protected String unescapeValue( String value ) { if (value == null || value.length() == 0) return value; StringBuilder sb = new StringBuilder(value.length()); CharacterIterator iter = new StringCharacterIterator(value); for (char c = iter.first(); c != CharacterIterator.DONE; c = iter.next()) { switch (c) { case '\\': // The character might be an escape sequence, so output the backslash ... char next = iter.next(); switch (next) { case CharacterIterator.DONE: // This was the last character, so we're done ... sb.append(c); break; case '\\': case '/': // optional case '\b': case '\f': case '\n': case '\r': case '\t': // This is an escaped sequence ... sb.append(next); break; case 'u': // This is an unicode escape sequence, so we already output one of them ... char first = iter.next(); char second = iter.next(); char third = iter.next(); char fourth = iter.next(); String string = "" + first + second + third + fourth; try { char uni = (char)Integer.parseInt(string, 16); sb.append(uni); } catch (NumberFormatException e) { // this is not a valid unicode escape sequence so just append it as is sb.append("\\u").append(string); continue; } break; default: // It's not an escape sequence that we care about. We've already written the backslash, // so just write the character ... sb.append(c); sb.append(next); } break; default: // Unicode escapes are handled above ... sb.append(c); break; } } return sb.toString(); } /** * Parse the date represented by the supplied value. This method is called by the {@link #parseValue(String)} method. 
This
         * method checks the following formats:
         * <ul>
         * <li>a string literal date of the form <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>"</code>
         * where <code>T</code> is a literal character</li>
         * <li>a string literal date of the form <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>Z"</code>
         * where <code>T</code> and <code>Z</code> are literal characters</li>
         * <li>a string literal date of the form
         * <code>"<i>yyyy</i>-<i>MM</i>-<i>dd</i>T<i>HH</i>:<i>mm</i>:<i>ss</i>GMT+<i>00</i>:<i>00</i>"</code> where
         * <code>T</code> and <code>GMT</code> are literal characters</li>
         * <li>a string literal date of the form <code>"/Date(<i>millisOrIso</i>)/"</code></li>
         * <li>a string literal date of the form <code>"\/Date(<i>millisOrIso</i>)\/"</code></li>
         * </ul>
         * <p>
         * Note that this method does not handle the <code>new Date(...)</code> or <code>Date(...)</code> representations, as
         * that's handled elsewhere.
         * </p>
         *
         * @param value the string representation of the value; never null and never empty
         * @return the date, or null if the value could not be parsed
         */
        protected Date parseDateFromLiteral( String value ) {
            char f = value.charAt(0);
            if (Character.isDigit(f)) {
                // Try as simply an ISO-8601 formatted date ...
                return evaluateDate(value);
            }
            if (value.startsWith("\\/Date(") && value.endsWith(")\\/")) {
                String millisOrIso = value.substring(7, value.length() - 3).trim();
                return evaluateDate(millisOrIso);
            }
            if (value.startsWith("/Date(") && value.endsWith(")/")) {
                String millisOrIso = value.substring(6, value.length() - 2).trim();
                return evaluateDate(millisOrIso);
            }
            return null;
        }

        protected Date evaluateDate( String millisOrIso ) {
            try {
                return values.createDate(millisOrIso);
            } catch (ParseException e) {
                // not an ISO-8601 format ...
            }
            return null;
        }
    }

    /**
     * The component that tokenizes a stream of JSON content.
     *
     * @author Randall Hauch <rhauch@redhat.com> (C) 2011 Red Hat Inc.
     * @since 5.1
     */
    @NotThreadSafe
    public static class Tokenizer {

        private final Reader reader;
        private int lineNumber;
        private int columnNumber;
        private boolean finished;
        private boolean hasPrevious;
        private char previous;
        private StringBuilder stringBuilder = new StringBuilder(128);

        /**
         * Create a new tokenizer that uses the supplied {@link Reader Java IO Reader} instance.
         *
         * @param reader the reader for accessing the JSON content; may not be null
         */
        public Tokenizer( Reader reader ) {
            this.reader = reader;
        }

        public boolean isFinished() {
            return finished;
        }

        protected char next() throws ParsingException {
            char c = 0;
            if (hasPrevious) {
                hasPrevious = false;
                c = previous;
            } else {
                try {
                    int x = reader.read();
                    if (x <= 0) {
                        // We've reached the end of the stream ...
                        finished = true;
                        c = 0;
                    } else {
                        c = (char)x;
                    }
                } catch (IOException e) {
                    throw error("Error reading at line " + lineNumber + ", column " + columnNumber + ": " + e.getLocalizedMessage(), e);
                }
            }
            // It's a valid character, but we have to advance our line & column counts ...
            if (previous == '\r') {
                ++lineNumber;
                columnNumber = (c == '\n' ? 0 : 1);
            } else if (c == '\n') {
                ++lineNumber;
                columnNumber = 0;
            } else {
                ++columnNumber;
            }
            previous = c;
            return c;
        }

        public String next( int characterCount ) throws ParsingException {
            StringBuilder sb = stringBuilder();
            for (int i = 0; i != characterCount; ++i) {
                sb.append(next());
            }
            return complete(sb);
        }

        protected final StringBuilder stringBuilder() {
            StringBuilder stringBuilder = this.stringBuilder != null ? this.stringBuilder : new StringBuilder(128);
            this.stringBuilder = null;
            stringBuilder.delete(0, stringBuilder.length());
            return stringBuilder;
        }

        protected final String complete( StringBuilder sb ) {
            assert sb != null;
            assert stringBuilder == null;
            stringBuilder = sb;
            return sb.toString();
        }

        public char nextUsefulChar() throws ParsingException {
            boolean withinComment = false;
            do {
                char next = next();
                if (next == '/') {
                    char afterNext = next();
                    if (afterNext != '/') {
                        throw error("Invalid character '" + afterNext + "' (expected comment //)");
                    }
                    withinComment = true;
                    continue;
                }
                boolean isLineSeparator = (next == '\n') || (next == '\r');
                if (isLineSeparator && withinComment) {
                    withinComment = false;
                }
                if (next == 0 || (!withinComment && next != ' ' && next != '\t' && !isLineSeparator)) return next;
            } while (true);
        }

        public char peek() throws ParsingException {
            if (hasPrevious) {
                return previous;
            }
            char next = nextUsefulChar();
            hasPrevious = true;
            previous = next;
            --columnNumber;
            return next;
        }

        /**
         * Read the next quoted string from the stream, where the stream begins with a single-quote or double-quote character
         * and the string ends with the same quote character.
         *
         * @return the next string; never null
         * @throws ParsingException
         */
        public String nextString() throws ParsingException {
            char c = nextUsefulChar();
            switch (c) {
                case '"':
                case '\'':
                    return nextString(c);
            }
            throw error("Expecting a field name at line " + lineNumber + ", column " + columnNumber + ". Check for a missing comma.");
        }

        public String nextString( char endQuote ) throws ParsingException {
            StringBuilder sb = stringBuilder();
            char c = 0;
            do {
                c = next();
                switch (c) {
                    case 0:
                    case '\n':
                    case '\r':
                        // The string was not properly terminated ...
                        throw error("The string was not terminated before the end of line or end of document, at line " + lineNumber + ", column " + columnNumber);
                    case '\\':
                        // Escape sequence ...
                        c = next();
                        switch (c) {
                            case '\'': // single quote
                            case '"': // double quote
                            case '\\': // reverse solidus
                            case '/': // forward solidus
                                break;
                            case 'b':
                                c = '\b';
                                break;
                            case 'f':
                                c = '\f';
                                break;
                            case 'n':
                                c = '\n';
                                break;
                            case 'r':
                                c = '\r';
                                break;
                            case 't':
                                c = '\t';
                                break;
                            case 'u':
                                // Unicode sequence made of exactly 4 hex characters ...
                                char[] hex = new char[4];
                                hex[0] = next();
                                hex[1] = next();
                                hex[2] = next();
                                hex[3] = next();
                                String code = new String(hex, 0, 4);
                                try {
                                    c = (char)Integer.parseInt(code, 16); // hex
                                } catch (NumberFormatException e) {
                                    // this is not a valid unicode escape sequence so just append it as is
                                    sb.append('\\').append('u').append(code);
                                    continue;
                                }
                                break;
                            default:
                                // No other characters are valid escaped sequences, so this is actually just a backslash
                                // followed by the current character c. So append the backslash ...
                                sb.append('\\');
                                // then the character ...
                                break;
                        }
                        sb.append(c);
                        break;
                    default:
                        // Just a regular character (or the end quote) ...
                        if (c == endQuote) {
                            // This is the only way to successfully exit this method!
                            return complete(sb);
                        }
                        // just a regular character ...
                        sb.append(c);
                }
            } while (true);
        }

        public void nextFieldDelim() throws ParsingException {
            try {
                switch (nextUsefulChar()) {
                    case ':':
                    case '=':
                        if (peek() == '>') {
                            next(); // consume the '>'
                        }
                        break;
                }
            } catch (ParsingException e) {
                throw error("Expecting a field delimiter (either ':', '=' or '=>') at line " + lineNumber + ", column " + columnNumber);
            }
        }

        /**
         * Consume the next document delimiter (either a ',' or a ';'), and return whether the end-of-document character (e.g.,
         * '}') has been consumed. This will correctly handle repeated delimiters, which are technically incorrect.
         *
         * @return true if a '}' has been consumed, or false otherwise
         * @throws ParsingException if the document delimiter could not be read
         */
        public boolean nextDocumentDelim() throws ParsingException {
            switch (nextUsefulChar()) {
                case ';': // handle ';' delimiters, too!
                case ',':
                    switch (peek()) {
                        case ':':
                        case ',':
                            // There are multiple delimiters in a row. Strictly speaking, this is invalid but we
                            // can easily handle it anyway ...
                            return nextDocumentDelim();
                        case '}':
                            // The comma was before '}' - this is not strictly well-formed, but we'll handle it
                            next();
                            return true;
                    }
                    return false;
                case '}':
                    return true;
            }
            return false;
        }

        /**
         * Return a string containing the next number on the stream.
         *
         * @return the next number as a string, or null if there is no content on the stream
         * @throws ParsingException if the number could not be read
         */
        public String nextNumber() throws ParsingException {
            char c = peek();
            if (c == 0) {
                return null;
            }
            StringBuilder sb = stringBuilder();
            while (c > ' ' && "{}[]:\"=#/\\',;".indexOf(c) <= -1) {
                if (c == 0) {
                    break;
                }
                sb.append(next());
                c = peek();
            }
            return complete(sb);
        }

        /**
         * Return a string containing the next alpha-numeric word on the stream.
         *
         * @return the next word as a string
         * @throws ParsingException if the word could not be read
         */
        public String nextWord() throws ParsingException {
            char c = peek();
            StringBuilder sb = stringBuilder();
            while (Character.isLetterOrDigit(c)) {
                sb.append(next());
                c = peek();
            }
            return complete(sb);
        }

        public ParsingException error( String message ) {
            return new ParsingException(message, lineNumber, columnNumber);
        }

        public ParsingException error( String message, Throwable t ) {
            return new ParsingException(message, t, lineNumber, columnNumber);
        }

        public int lineNumber() {
            return lineNumber;
        }

        public int columnNumber() {
            return columnNumber;
        }
    }
}
phantomjinx/modeshape
modeshape-schematic/src/main/java/org/infinispan/schematic/internal/document/JsonReader.java
Java
apache-2.0
60,038
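A minimal usage sketch (not from the repository file above) showing how the JsonReader.Tokenizer shown above could be driven against a tiny JSON document. It assumes the calling code can see the org.infinispan.schematic.internal.document package so that Tokenizer and ParsingException resolve; the class name TokenizerSketch and the sample input are invented purely for illustration.

import java.io.StringReader;

public class TokenizerSketch {
    public static void main( String[] args ) throws Exception {
        // Hypothetical driver for JsonReader.Tokenizer; the input document is made up.
        JsonReader.Tokenizer t = new JsonReader.Tokenizer(new StringReader("{ \"age\" : 42 }"));
        char open = t.nextUsefulChar();            // skips whitespace and returns '{'
        String field = t.nextString();             // reads the quoted field name: age
        t.nextFieldDelim();                        // accepts ':', '=' or '=>'
        String number = t.nextNumber();            // reads the unquoted literal: 42
        boolean endOfDoc = t.nextDocumentDelim();  // true once '}' has been consumed
        System.out.println(open + " " + field + " = " + number + ", end=" + endOfDoc);
    }
}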
<!--
  ~ Hibernate Validator, declare and validate application constraints
  ~
  ~ License: Apache License, Version 2.0
  ~ See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
<head>
</head>
<body>
<p>Implementations of the MessageInterpolator interface, in particular ResourceBundleMessageInterpolator, which custom
implementations of the interface can use for delegation.</p>

<p>This package is part of the public Hibernate Validator API.</p>
</body>
</html>
DavideD/hibernate-validator
engine/src/main/java/org/hibernate/validator/messageinterpolation/package.html
HTML
apache-2.0
640
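The package description above mentions delegating to ResourceBundleMessageInterpolator from custom MessageInterpolator implementations. Below is a hedged sketch of that pattern, assuming the standard javax.validation API; the class UpperCasingInterpolator and its upper-casing behaviour are invented purely for illustration.

import java.util.Locale;
import javax.validation.MessageInterpolator;
import org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator;

// Hypothetical custom interpolator that delegates the resource-bundle lookup and
// parameter substitution to ResourceBundleMessageInterpolator, then post-processes
// the result (upper-casing, chosen only to make the delegation visible).
public class UpperCasingInterpolator implements MessageInterpolator {
    private final MessageInterpolator delegate = new ResourceBundleMessageInterpolator();

    @Override
    public String interpolate(String messageTemplate, Context context) {
        return delegate.interpolate(messageTemplate, context).toUpperCase(Locale.ROOT);
    }

    @Override
    public String interpolate(String messageTemplate, Context context, Locale locale) {
        return delegate.interpolate(messageTemplate, context, locale).toUpperCase(locale);
    }
}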
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using Microsoft.VisualStudio.Text;

namespace TestUtilities.Mocks {
    public class MockTrackingSpan : ITrackingSpan {
        private readonly int _start, _length;
        private readonly MockTextSnapshot _snapshot;
        private readonly SpanTrackingMode _trackingMode;
        private readonly ITrackingPoint _startPoint, _endPoint;

        public MockTrackingSpan(MockTextSnapshot snapshot, int start, int length, SpanTrackingMode trackingMode = SpanTrackingMode.EdgeExclusive) {
            _start = start;
            _length = length;
            _snapshot = snapshot;
            _trackingMode = trackingMode;
            switch (_trackingMode) {
                case SpanTrackingMode.EdgeExclusive:
                    _startPoint = new MockTrackingPoint(snapshot, start, PointTrackingMode.Positive);
                    _endPoint = new MockTrackingPoint(snapshot, start + length, PointTrackingMode.Negative);
                    break;
                case SpanTrackingMode.EdgeInclusive:
                    _startPoint = new MockTrackingPoint(snapshot, start, PointTrackingMode.Negative);
                    _endPoint = new MockTrackingPoint(snapshot, start + length, PointTrackingMode.Positive);
                    break;
                case SpanTrackingMode.EdgeNegative:
                    _startPoint = new MockTrackingPoint(snapshot, start, PointTrackingMode.Negative);
                    _endPoint = new MockTrackingPoint(snapshot, start + length, PointTrackingMode.Negative);
                    break;
                case SpanTrackingMode.EdgePositive:
                    _startPoint = new MockTrackingPoint(snapshot, start, PointTrackingMode.Positive);
                    _endPoint = new MockTrackingPoint(snapshot, start + length, PointTrackingMode.Positive);
                    break;
            }
        }

        public SnapshotPoint GetEndPoint(ITextSnapshot snapshot) {
            return new SnapshotPoint(_snapshot, _start + _length);
        }

        public Span GetSpan(ITextVersion version) {
            return Span.FromBounds(
                _startPoint.GetPosition(version),
                _endPoint.GetPosition(version)
            );
        }

        public SnapshotSpan GetSpan(ITextSnapshot snapshot) {
            return new SnapshotSpan(snapshot, GetSpan(snapshot.Version));
        }

        public SnapshotPoint GetStartPoint(ITextSnapshot snapshot) {
            var span = GetSpan(snapshot.Version);
            return new SnapshotPoint(snapshot, span.Start);
        }

        public string GetText(ITextSnapshot snapshot) {
            var span = GetSpan(snapshot.Version);
            return snapshot.GetText(span);
        }

        public ITextBuffer TextBuffer {
            get { return _snapshot.TextBuffer; }
        }

        public TrackingFidelityMode TrackingFidelity {
            get { throw new NotImplementedException(); }
        }

        public SpanTrackingMode TrackingMode {
            get { return _trackingMode; }
        }
    }
}
paulvanbrenk/nodejstools
Common/Tests/Utilities/Mocks/MockTrackingSpan.cs
C#
apache-2.0
3,329
// Copyright 2018 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using Xunit;

// This is needed to ensure trace tests that rely on the RateLimiter
// do not affect each other.
[assembly: CollectionBehavior(DisableTestParallelization = true)]
googleapis/google-cloud-dotnet
apis/Google.Cloud.Diagnostics.AspNetCore/Google.Cloud.Diagnostics.AspNetCore.Snippets/AssemblyInfo.cs
C#
apache-2.0
794
package sampleclean.clean.featurize

import org.apache.spark.sql.Row

/* With the ensemble featurizer we can take a set of featurizers and combine them together. */
@serializable
private [sampleclean] abstract class EnsembleFeaturizer(cols: List[Int], featurizers: List[Featurizer]) {

  /** */
  def featurize[K, V](rows: Set[Row], params: collection.immutable.Map[K, V] = null): (Set[Row], Array[Double]) = {
    var pkset: Set[Row] = Set()
    var feature: Array[Double] = Array()

    for (featurizer <- featurizers) {
      val result = featurizer.featurize(rows, params)
      pkset = pkset ++ result._1
      feature = Array.concat(feature, result._2)
    }

    return (pkset, feature)
  }
}
agilemobiledev/sampleclean-async
src/main/scala/sampleclean/clean/featurize/EnsembleFeaturizer.scala
Scala
apache-2.0
674
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.xmlrpc.server;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xmlrpc.XmlRpcException;
import org.apache.xmlrpc.XmlRpcRequest;
import org.apache.xmlrpc.XmlRpcRequestConfig;
import org.apache.xmlrpc.common.ServerStreamConnection;
import org.apache.xmlrpc.common.XmlRpcStreamRequestConfig;
import org.apache.xmlrpc.common.XmlRpcStreamRequestProcessor;
import org.apache.xmlrpc.parser.XmlRpcRequestParser;
import org.apache.xmlrpc.serializer.DefaultXMLWriterFactory;
import org.apache.xmlrpc.serializer.XmlRpcWriter;
import org.apache.xmlrpc.serializer.XmlWriterFactory;
import org.apache.xmlrpc.util.SAXParsers;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;

/** Extension of {@link XmlRpcServer} with support for reading
 * requests from a stream and writing the response to another
 * stream.
 */
public abstract class XmlRpcStreamServer extends XmlRpcServer implements XmlRpcStreamRequestProcessor {
    private static final Log log = LogFactory.getLog(XmlRpcStreamServer.class);
    private XmlWriterFactory writerFactory = new DefaultXMLWriterFactory();
    private static final XmlRpcErrorLogger theErrorLogger = new XmlRpcErrorLogger();
    private XmlRpcErrorLogger errorLogger = theErrorLogger;

    protected XmlRpcRequest getRequest(final XmlRpcStreamRequestConfig pConfig, InputStream pStream) throws XmlRpcException {
        final XmlRpcRequestParser parser = new XmlRpcRequestParser(pConfig, getTypeFactory());
        final XMLReader xr = SAXParsers.newXMLReader();
        xr.setContentHandler(parser);
        try {
            xr.parse(new InputSource(pStream));
        } catch (SAXException e) {
            Exception ex = e.getException();
            if (ex != null && ex instanceof XmlRpcException) {
                throw (XmlRpcException) ex;
            }
            throw new XmlRpcException("Failed to parse XML-RPC request: " + e.getMessage(), e);
        } catch (IOException e) {
            throw new XmlRpcException("Failed to read XML-RPC request: " + e.getMessage(), e);
        }
        final List params = parser.getParams();
        return new XmlRpcRequest(){
            public XmlRpcRequestConfig getConfig() { return pConfig; }
            public String getMethodName() { return parser.getMethodName(); }
            public int getParameterCount() { return params == null ? 0 : params.size(); }
            public Object getParameter(int pIndex) { return params.get(pIndex); }
        };
    }

    protected XmlRpcWriter getXmlRpcWriter(XmlRpcStreamRequestConfig pConfig, OutputStream pStream) throws XmlRpcException {
        ContentHandler w = getXMLWriterFactory().getXmlWriter(pConfig, pStream);
        return new XmlRpcWriter(pConfig, w, getTypeFactory());
    }

    protected void writeResponse(XmlRpcStreamRequestConfig pConfig, OutputStream pStream, Object pResult) throws XmlRpcException {
        try {
            getXmlRpcWriter(pConfig, pStream).write(pConfig, pResult);
        } catch (SAXException e) {
            throw new XmlRpcException("Failed to write XML-RPC response: " + e.getMessage(), e);
        }
    }

    /**
     * This method allows to convert the error into another error. For example, this
     * may be an error, which could be deserialized by the client.
     */
    protected Throwable convertThrowable(Throwable pError) {
        return pError;
    }

    protected void writeError(XmlRpcStreamRequestConfig pConfig, OutputStream pStream, Throwable pError) throws XmlRpcException {
        final Throwable error = convertThrowable(pError);
        final int code;
        final String message;
        if (error instanceof XmlRpcException) {
            XmlRpcException ex = (XmlRpcException) error;
            code = ex.code;
        } else {
            code = 0;
        }
        message = error.getMessage();
        try {
            getXmlRpcWriter(pConfig, pStream).write(pConfig, code, message, error);
        } catch (SAXException e) {
            throw new XmlRpcException("Failed to write XML-RPC response: " + e.getMessage(), e);
        }
    }

    /** Sets the XML Writer factory.
     * @param pFactory The XML Writer factory.
     */
    public void setXMLWriterFactory(XmlWriterFactory pFactory) {
        writerFactory = pFactory;
    }

    /** Returns the XML Writer factory.
     * @return The XML Writer factory.
     */
    public XmlWriterFactory getXMLWriterFactory() {
        return writerFactory;
    }

    protected InputStream getInputStream(XmlRpcStreamRequestConfig pConfig, ServerStreamConnection pConnection) throws IOException {
        InputStream istream = pConnection.newInputStream();
        if (pConfig.isEnabledForExtensions() && pConfig.isGzipCompressing()) {
            istream = new GZIPInputStream(istream);
        }
        return istream;
    }

    /** Called to prepare the output stream. Typically used for enabling
     * compression, or similar filters.
     * @param pConnection The connection object.
     */
    protected OutputStream getOutputStream(ServerStreamConnection pConnection, XmlRpcStreamRequestConfig pConfig, OutputStream pStream) throws IOException {
        if (pConfig.isEnabledForExtensions() && pConfig.isGzipRequesting()) {
            return new GZIPOutputStream(pStream);
        } else {
            return pStream;
        }
    }

    /** Called to prepare the output stream, if content length is
     * required.
     * @param pConfig The configuration object.
     * @param pSize The requests size.
     */
    protected OutputStream getOutputStream(XmlRpcStreamRequestConfig pConfig, ServerStreamConnection pConnection, int pSize) throws IOException {
        return pConnection.newOutputStream();
    }

    /** Returns, whether the requests content length is required.
     * @param pConfig The configuration object.
     */
    protected boolean isContentLengthRequired(XmlRpcStreamRequestConfig pConfig) {
        return false;
    }

    /** Processes a "connection". The "connection" is an opaque object, which is
     * being handled by the subclasses.
     * @param pConfig The request configuration.
     * @param pConnection The "connection" being processed.
     * @throws XmlRpcException Processing the request failed.
     */
    public void execute(XmlRpcStreamRequestConfig pConfig, ServerStreamConnection pConnection) throws XmlRpcException {
        log.debug("execute: ->");
        try {
            Object result;
            Throwable error;
            InputStream istream = null;
            try {
                istream = getInputStream(pConfig, pConnection);
                XmlRpcRequest request = getRequest(pConfig, istream);
                result = execute(request);
                istream.close();
                istream = null;
                error = null;
                log.debug("execute: Request performed successfully");
            } catch (Throwable t) {
                logError(t);
                result = null;
                error = t;
            } finally {
                if (istream != null) { try { istream.close(); } catch (Throwable ignore) {} }
            }
            boolean contentLengthRequired = isContentLengthRequired(pConfig);
            ByteArrayOutputStream baos;
            OutputStream ostream;
            if (contentLengthRequired) {
                baos = new ByteArrayOutputStream();
                ostream = baos;
            } else {
                baos = null;
                ostream = pConnection.newOutputStream();
            }
            ostream = getOutputStream(pConnection, pConfig, ostream);
            try {
                if (error == null) {
                    writeResponse(pConfig, ostream, result);
                } else {
                    writeError(pConfig, ostream, error);
                }
                ostream.close();
                ostream = null;
            } finally {
                if (ostream != null) { try { ostream.close(); } catch (Throwable ignore) {} }
            }
            if (baos != null) {
                OutputStream dest = getOutputStream(pConfig, pConnection, baos.size());
                try {
                    baos.writeTo(dest);
                    dest.close();
                    dest = null;
                } finally {
                    if (dest != null) { try { dest.close(); } catch (Throwable ignore) {} }
                }
            }
            pConnection.close();
            pConnection = null;
        } catch (IOException e) {
            throw new XmlRpcException("I/O error while processing request: " + e.getMessage(), e);
        } finally {
            if (pConnection != null) { try { pConnection.close(); } catch (Throwable ignore) {} }
        }
        log.debug("execute: <-");
    }

    protected void logError(Throwable t) {
        final String msg = t.getMessage() == null ? t.getClass().getName() : t.getMessage();
        errorLogger.log(msg, t);
    }

    /**
     * Returns the error logger.
     */
    public XmlRpcErrorLogger getErrorLogger() {
        return errorLogger;
    }

    /**
     * Sets the error logger.
     */
    public void setErrorLogger(XmlRpcErrorLogger pErrorLogger) {
        errorLogger = pErrorLogger;
    }
}
kralf/ros-android
src/xmlrpc/org/apache/xmlrpc/server/XmlRpcStreamServer.java
Java
apache-2.0
9,381
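A sketch of one way a server might use the extension points of XmlRpcStreamServer above. The subclass SanitizingXmlRpcStreamServer and its sanitizing policy are invented purely for illustration, and it is left abstract so it does not need the transport plumbing of a real server; it also assumes the single-argument XmlRpcException(String) constructor is available.

import org.apache.xmlrpc.XmlRpcException;
import org.apache.xmlrpc.server.XmlRpcStreamServer;

// Hypothetical subclass: convertThrowable() is the hook writeError() calls before a
// fault is serialized, so overriding it keeps internal exception details on the server.
public abstract class SanitizingXmlRpcStreamServer extends XmlRpcStreamServer {
    @Override
    protected Throwable convertThrowable(Throwable pError) {
        if (pError instanceof XmlRpcException) {
            return pError; // already a client-visible fault, pass it through
        }
        return new XmlRpcException("Internal server error"); // hide internal details
    }
}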