text
stringlengths 2
1.04M
| meta
dict |
---|---|
"""Launch the experiment server."""
def main():
# Make sure gevent patches are applied early.
import gevent.monkey
gevent.monkey.patch_all()
from dallinger.experiment_server.gunicorn import launch
launch()
if __name__ == "__main__":
main()
| {
"content_hash": "48d8c4a0af126eb134809b786f3805c3",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 59,
"avg_line_length": 18,
"alnum_prop": 0.6481481481481481,
"repo_name": "Dallinger/Dallinger",
"id": "4e8a1383f4da3527d49d0a4d47ee3071e023d266",
"size": "270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dallinger_scripts/web.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2204"
},
{
"name": "Dockerfile",
"bytes": "4288"
},
{
"name": "HTML",
"bytes": "62909"
},
{
"name": "JavaScript",
"bytes": "49602"
},
{
"name": "Jinja",
"bytes": "4871"
},
{
"name": "Procfile",
"bytes": "88"
},
{
"name": "Python",
"bytes": "1131695"
},
{
"name": "Ruby",
"bytes": "1769"
},
{
"name": "Shell",
"bytes": "2905"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.11"/>
<title>V8 API Reference Guide for node.js v7.5.0: Member List</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { init_search(); });
</script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">V8 API Reference Guide for node.js v7.5.0
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.11 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="namespaces.html"><span>Namespaces</span></a></li>
<li class="current"><a href="annotated.html"><span>Classes</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
<li><a href="examples.html"><span>Examples</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="annotated.html"><span>Class List</span></a></li>
<li><a href="classes.html"><span>Class Index</span></a></li>
<li><a href="inherits.html"><span>Class Hierarchy</span></a></li>
<li><a href="functions.html"><span>Class Members</span></a></li>
</ul>
</div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="namespacev8.html">v8</a></li><li class="navelem"><a class="el" href="structv8_1_1CopyablePersistentTraits.html">CopyablePersistentTraits</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="headertitle">
<div class="title">v8::CopyablePersistentTraits< T > Member List</div> </div>
</div><!--header-->
<div class="contents">
<p>This is the complete list of members for <a class="el" href="structv8_1_1CopyablePersistentTraits.html">v8::CopyablePersistentTraits< T ></a>, including all inherited members.</p>
<table class="directory">
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>Copy</b>(const Persistent< S, M > &source, CopyablePersistent *dest) (defined in <a class="el" href="structv8_1_1CopyablePersistentTraits.html">v8::CopyablePersistentTraits< T ></a>)</td><td class="entry"><a class="el" href="structv8_1_1CopyablePersistentTraits.html">v8::CopyablePersistentTraits< T ></a></td><td class="entry"><span class="mlabel">inline</span><span class="mlabel">static</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>CopyablePersistent</b> typedef (defined in <a class="el" href="structv8_1_1CopyablePersistentTraits.html">v8::CopyablePersistentTraits< T ></a>)</td><td class="entry"><a class="el" href="structv8_1_1CopyablePersistentTraits.html">v8::CopyablePersistentTraits< T ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>kResetInDestructor</b> (defined in <a class="el" href="structv8_1_1CopyablePersistentTraits.html">v8::CopyablePersistentTraits< T ></a>)</td><td class="entry"><a class="el" href="structv8_1_1CopyablePersistentTraits.html">v8::CopyablePersistentTraits< T ></a></td><td class="entry"><span class="mlabel">static</span></td></tr>
</table></div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.11
</small></address>
</body>
</html>
| {
"content_hash": "985ee47f62750b9a6b1002c1488e849b",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 486,
"avg_line_length": 52.5045871559633,
"alnum_prop": 0.6641621527171064,
"repo_name": "v8-dox/v8-dox.github.io",
"id": "5810f2c95f10c71a3d71f5afdc51dc8786268528",
"size": "5723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "da59a57/html/structv8_1_1CopyablePersistentTraits-members.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from os.path import join
import bzt
from bzt.modules.aggregator import DataPoint, KPISet
from bzt.modules.k6 import K6Executor, K6LogReader
from bzt.utils import EXE_SUFFIX
from tests.unit import BZTestCase, ExecutorTestCase, RESOURCES_DIR, ROOT_LOGGER
TOOL_NAME = join(RESOURCES_DIR, "k6", "k6_mock" + EXE_SUFFIX)
K6_SCRIPT = join(RESOURCES_DIR, "k6", "k6_script.js")
class TestK6Executor(ExecutorTestCase):
    """Tests for K6Executor command-line generation and lifecycle."""

    EXECUTOR = K6Executor
    CMD_LINE = None

    def start_subprocess(self, args, **kwargs):
        """Stub for engine.start_subprocess: only records the command line."""
        self.CMD_LINE = " ".join(args)

    def _prepare_with_stubbed_tool_check(self):
        """Call obj.prepare() with bzt.utils.exec_and_communicate stubbed out.

        prepare() shells out through exec_and_communicate to probe for the
        k6 tool; the stub makes that probe succeed without a real k6
        installation. The original function is restored even if prepare()
        raises.
        """
        tmp_eac = bzt.utils.exec_and_communicate
        try:
            bzt.utils.exec_and_communicate = lambda *args, **kwargs: ("", "")
            self.obj.prepare()
        finally:
            bzt.utils.exec_and_communicate = tmp_eac

    def test_full(self):
        """Exercise the whole executor lifecycle against the mock k6 binary."""
        self.configure({"execution": {
            "concurrency": 5,
            "hold-for": "30",
            "iterations": 50,
            "scenario": {"script": K6_SCRIPT}}})
        self._prepare_with_stubbed_tool_check()

        self.obj.get_widget()
        self.obj.k6.tool_name = TOOL_NAME
        self.obj.startup()
        self.obj.check()
        self.obj.shutdown()
        self.obj.post_process()

    def simple_run(self, config):
        """Prepare and start the executor, capturing the generated command line."""
        self.configure(config)
        self._prepare_with_stubbed_tool_check()

        self.obj.engine.start_subprocess = self.start_subprocess
        self.obj.startup()
        self.obj.post_process()

    def test_kpi_file(self):
        """The CSV KPI file must be passed to k6 via --out."""
        self.simple_run({
            "execution": {
                "scenario": {"script": K6_SCRIPT},
                "executor": "k6"
            },
        })
        self.assertIn(f"--out csv={self.obj.kpi_file}", self.CMD_LINE)

    def test_concurrency(self):
        """'concurrency' maps to the k6 --vus option."""
        self.simple_run({
            "execution": {
                "concurrency": "5",
                "scenario": {"script": K6_SCRIPT},
                "executor": "k6"
            },
        })
        self.assertIn("--vus 5", self.CMD_LINE)

    def test_hold_for(self):
        """'hold-for' maps to the k6 --duration option (in seconds)."""
        self.simple_run({
            "execution": {
                "hold-for": "30",
                "scenario": {"script": K6_SCRIPT},
                "executor": "k6"
            },
        })
        self.assertIn("--duration 30s", self.CMD_LINE)

    def test_iterations(self):
        """'iterations' maps to the k6 --iterations option."""
        self.simple_run({
            "execution": {
                "iterations": "100",
                "scenario": {"script": K6_SCRIPT},
                "executor": "k6"
            },
        })
        self.assertIn("--iterations 100", self.CMD_LINE)

    def test_iterations_multiplied(self):
        """Iterations per VU are multiplied by concurrency (10 * 10 = 100)."""
        self.simple_run({
            "execution": {
                "iterations": "10",
                "concurrency": "10",
                "scenario": {"script": K6_SCRIPT},
                "executor": "k6"
            },
        })
        self.assertIn("--iterations 100", self.CMD_LINE)
class TestK6Reader(BZTestCase):
    """Tests for the K6LogReader KPI-csv parser."""

    def test_read(self):
        """The sample KPI csv parses into 4 datapoints with 2 ok / 2 failed."""
        reader = K6LogReader(join(RESOURCES_DIR, "k6", "k6_kpi.csv"), ROOT_LOGGER)

        datapoints = [point for point in reader.datapoints(True)]
        self.assertEqual(4, len(datapoints))

        # every datapoint must carry a plausible (post-2017) unix timestamp
        for point in datapoints:
            self.assertTrue(point['ts'] > 1500000000)

        cumulative = datapoints[-1][DataPoint.CUMULATIVE]['']
        self.assertEqual(2, cumulative[KPISet.SUCCESSES])
        self.assertEqual(2, cumulative[KPISet.FAILURES])
| {
"content_hash": "6da9c49ddd605ed803ba209b88f7032f",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 83,
"avg_line_length": 30.296610169491526,
"alnum_prop": 0.5362237762237763,
"repo_name": "Blazemeter/taurus",
"id": "2c5dff8df3561fa986fef22187159f00632f1baf",
"size": "3575",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/unit/modules/test_k6.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4357"
},
{
"name": "C",
"bytes": "5131"
},
{
"name": "C#",
"bytes": "18482"
},
{
"name": "CSS",
"bytes": "5298"
},
{
"name": "Dockerfile",
"bytes": "5222"
},
{
"name": "Groovy",
"bytes": "3280"
},
{
"name": "HTML",
"bytes": "5136"
},
{
"name": "Java",
"bytes": "9586"
},
{
"name": "JavaScript",
"bytes": "27121"
},
{
"name": "PHP",
"bytes": "8787"
},
{
"name": "PLpgSQL",
"bytes": "3712"
},
{
"name": "Python",
"bytes": "2160323"
},
{
"name": "RobotFramework",
"bytes": "6383"
},
{
"name": "Ruby",
"bytes": "4184"
},
{
"name": "Scala",
"bytes": "15526"
},
{
"name": "Shell",
"bytes": "12058"
},
{
"name": "Smarty",
"bytes": "13606"
}
],
"symlink_target": ""
} |
<?php
/**
* Site configuration, this file is changed by user per site.
*
*/
/**
* Set level of error reporting
*/
error_reporting(-1);
ini_set('display_errors', 1);
/**
* Set what to show as debug or developer information in the get_debug() theme helper.
*/
$ra->config['debug']['rama'] = false;
$ra->config['debug']['session'] = false;
$ra->config['debug']['timer'] = false;
$ra->config['debug']['db-num-queries'] = false;
$ra->config['debug']['db-queries'] = false;
/**
* Set database(s).
*/
$ra->config['database'][0]['dsn'] = 'sqlite:' . RAMA_SITE_PATH . '/data/.ht.sqlite';
/**
* What type of urls should be used?
*
* default = 0 => index.php/controller/method/arg1/arg2/arg3
* clean = 1 => controller/method/arg1/arg2/arg3
* querystring = 2 => index.php?q=controller/method/arg1/arg2/arg3
*/
$ra->config['url_type'] = 1;
/**
* Set a base_url to use another than the default calculated
*/
$ra->config['base_url'] = null;
/**
* How to hash password of new users, choose from: plain, md5salt, md5, sha1salt, sha1.
*/
$ra->config['hashing_algorithm'] = 'sha1salt';
/**
* Allow or disallow creation of new user accounts.
*/
$ra->config['create_new_users'] = true;
/**
 * Define session name and session key.
 *
 * The session name is derived from the install directory so several sites
 * on the same host get distinct session cookies. Only the path-separator
 * characters ':', '.', '/', '_' and '-' are stripped.
 *
 * Fix: the original class [:\.\/-_] contained the unescaped range /-_
 * (0x2F-0x5F), which also stripped digits, uppercase letters and ;<=>?@[\]^.
 * Placing '-' last makes it a literal.
 */
$ra->config['session_name'] = preg_replace('/[:.\/_-]/', '', __DIR__);
$ra->config['session_key'] = 'rama';
/**
* Define default server timezone when displaying date and times to the user. All internals are still UTC.
*/
$ra->config['timezone'] = 'Europe/Stockholm';
/**
* Define internal character encoding
*/
$ra->config['character_encoding'] = 'UTF-8';
/**
* Define language
*/
$ra->config['language'] = 'en';
/**
* Define the controllers, their classname and enable/disable them.
*
* The array-key is matched against the url, for example:
* the url 'developer/dump' would instantiate the controller with the key "developer", that is
* CCDeveloper and call the method "dump" in that class. This process is managed in:
* $ra->FrontControllerRoute();
* which is called in the frontcontroller phase from index.php.
*/
$ra->config['controllers'] = array(
'index' => array('enabled' => true,'class' => 'CCIndex'),
'developer' => array('enabled' => true,'class' => 'CCDeveloper'),
'theme' => array('enabled' => true,'class' => 'CCTheme'),
'guestbook' => array('enabled' => true,'class' => 'CCGuestbook'),
'content' => array('enabled' => true,'class' => 'CCContent'),
'blog' => array('enabled' => true,'class' => 'CCBlog'),
'page' => array('enabled' => true,'class' => 'CCPage'),
'user' => array('enabled' => true,'class' => 'CCUser'),
'acp' => array('enabled' => true,'class' => 'CCAdminControlPanel'),
'module' => array('enabled' => true,'class' => 'CCModules'),
'my' => array('enabled' => true,'class' => 'CCMycontroller'),
);
/**
* Define a routing table for urls.
*
* Route custom urls to a defined controller/method/arguments
*/
$ra->config['routing'] = array(
'home' => array('enabled' => true, 'url' => 'index/index'),
);
/**
* Define menus.
*
* Create hardcoded menus and map them to a theme region through $ra->config['theme'].
*/
$ra->config['menus'] = array(
'navbar' => array(
'home' => array('label'=>'Home', 'url'=>'home'),
'modules' => array('label'=>'Modules', 'url'=>'module'),
'content' => array('label'=>'Content', 'url'=>'content'),
'guestbook' => array('label'=>'Guestbook', 'url'=>'guestbook'),
'blog' => array('label'=>'Blog', 'url'=>'blog'),
),
'my-navbar' => array(
'home' => array('label'=>'About Me', 'url'=>'my'),
'blog' => array('label'=>'My Blog', 'url'=>'my/blog'),
'guestbook' => array('label'=>'Guestbook', 'url'=>'my/guestbook'),
'content' => array('label'=>'Content', 'url'=>'content'),
),
);
/**
* Settings for the theme. The theme may have a parent theme.
*
* When a parent theme is used the parent's functions.php will be included before the current
* theme's functions.php. The parent stylesheet can be included in the current stylesheet
* by an @import clause. See site/themes/mytheme for an example of a child/parent theme.
* Template files can reside in the parent or current theme, the CRama::ThemeEngineRender()
* looks for the template-file in the current theme first, then it looks in the parent theme.
*
* There are two useful theme helpers defined in themes/functions.php.
* theme_url($url): Prepends the current theme url to $url to make an absolute url.
* theme_parent_url($url): Prepends the parent theme url to $url to make an absolute url.
*
* path: Path to current theme, relativly RAMA_INSTALL_PATH, for example themes/grid or site/themes/mytheme.
* parent: Path to parent theme, same structure as 'path'. Can be left out or set to null.
* stylesheet: The stylesheet to include, always part of the current theme, use @import to include the parent stylesheet.
* template_file: Set the default template file, defaults to default.tpl.php.
* regions: Array with all regions that the theme supports.
* menu_to_region: Array mapping menus to regions.
* data: Array with data that is made available to the template file as variables.
*
* The name of the stylesheet is also appended to the data-array, as 'stylesheet' and made
* available to the template files.
*/
$ra->config['theme'] = array(
'path' => 'site/themes/mytheme',
//'path' => 'themes/grid',
'parent' => 'themes/grid',
'stylesheet' => 'style.css',
'template_file' => 'index.tpl.php',
'regions' => array('navbar', 'flash','featured-first','featured-middle','featured-last',
'primary','sidebar','triptych-first','triptych-middle','triptych-last',
'footer-column-one','footer-column-two','footer-column-three','footer-column-four',
'footer',
),
'menu_to_region' => array('my-navbar'=>'navbar'),
'data' => array(
'header' => 'Rama',
'slogan' => 'A PHP-based MVC-inspired CMF',
'favicon' => 'logo_80x80.png',
'logo' => 'logo_80x80.png',
'logo_width' => 80,
'logo_height' => 80,
'footer' => '<p>Rama by Mats Sandén (mazzan@masoft.se) | Based on CLydia by Mikael Roos (mos@dbwebb.se)</p>',
),
);
| {
"content_hash": "46e1695933bb70a2f22111aa450812f7",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 121,
"avg_line_length": 34.02717391304348,
"alnum_prop": 0.6292924452962786,
"repo_name": "mazzan/MVC",
"id": "c3ce2293c83295feb3b58550983724a752442146",
"size": "6262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "site/config.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "25555"
},
{
"name": "JavaScript",
"bytes": "147"
},
{
"name": "PHP",
"bytes": "1063943"
}
],
"symlink_target": ""
} |
package org.basex.query.util.regex;
import org.basex.util.list.*;
/**
* Resizable-array implementation for regular expressions.
*
* @author BaseX Team 2005-16, BSD License
* @author Christian Gruen
*/
public final class RegExpList extends ElementList {
  /** Backing array holding the elements; grows on demand. */
  private RegExp[] list = new RegExp[1];

  /**
   * Appends an element, growing the backing array when it is full.
   * @param element element to be added
   * @return self reference
   */
  public RegExpList add(final RegExp element) {
    if(size == list.length) resize(newSize());
    list[size] = element;
    size++;
    return this;
  }

  /**
   * Returns the element stored at the given position.
   * @param p position
   * @return value
   */
  public RegExp get(final int p) {
    return list[p];
  }

  /**
   * Grows the backing array to the requested capacity, keeping all
   * currently stored elements.
   * @param sz new size
   */
  private void resize(final int sz) {
    final RegExp[] grown = new RegExp[sz];
    System.arraycopy(list, 0, grown, 0, size);
    list = grown;
  }

  /**
   * Returns an array with all elements and invalidates the internal array.
   * Warning: the function must only be called if the list is discarded afterwards.
   * @return array (internal representation!)
   */
  public RegExp[] finish() {
    final RegExp[] result;
    if(size == list.length) {
      // filled exactly to capacity: hand out the internal array as-is
      result = list;
    } else {
      result = new RegExp[size];
      System.arraycopy(list, 0, result, 0, size);
    }
    list = null;
    return result;
  }
}
| {
"content_hash": "b9841bebeb5338b8d8a46ecbaac40299",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 83,
"avg_line_length": 23.05,
"alnum_prop": 0.6276211135213304,
"repo_name": "vincentml/basex",
"id": "c83b4d3855e6e1debdeca294ebe5e37bb2cbf186",
"size": "1383",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "basex-core/src/main/java/org/basex/query/util/regex/RegExpList.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "9372"
},
{
"name": "Batchfile",
"bytes": "2502"
},
{
"name": "C",
"bytes": "17146"
},
{
"name": "C#",
"bytes": "15568"
},
{
"name": "C++",
"bytes": "7796"
},
{
"name": "CSS",
"bytes": "3386"
},
{
"name": "Common Lisp",
"bytes": "3211"
},
{
"name": "HTML",
"bytes": "1057"
},
{
"name": "Haskell",
"bytes": "4065"
},
{
"name": "Java",
"bytes": "23540070"
},
{
"name": "JavaScript",
"bytes": "8881"
},
{
"name": "Makefile",
"bytes": "1234"
},
{
"name": "PHP",
"bytes": "8690"
},
{
"name": "Perl",
"bytes": "7801"
},
{
"name": "Python",
"bytes": "26123"
},
{
"name": "QMake",
"bytes": "377"
},
{
"name": "Rebol",
"bytes": "4731"
},
{
"name": "Ruby",
"bytes": "7359"
},
{
"name": "Scala",
"bytes": "11692"
},
{
"name": "Shell",
"bytes": "3557"
},
{
"name": "Visual Basic",
"bytes": "11957"
},
{
"name": "XQuery",
"bytes": "310803"
},
{
"name": "XSLT",
"bytes": "172"
}
],
"symlink_target": ""
} |
Current Version
-
3.0.0 September 25, 2019
- Update flake8 quote linting
- Add official support for Python 3.7
- Drop official support for Python 2.6, 3.2, 3.3
- Added `transactions` resource
2.7.0 June 21, 2018
- Added optional `time_field` argument to `client.accounts.transactions`
2.6.1 June 5, 2018
- Added `HTTPResponseError` to top-level import
- Remove `__all__` imports: they never worked
2.6.0 May 17, 2018
- Added `HTTPResponseError`
2.5.0 December 5, 2017
- Add links resource
2.4.0 July 19, 2017
- Add customers resource
2.3.0 June 22, 2017
- Add api_version to Client config
- Add tox
- Add support for python3.6
- Add flake8 to test suite
- Move /test directory inside package
- Migrate relative imports to package absolute
2.2.0 January 6, 2017
- Add merchants resource
2.1.0 January 4, 2017
- Add utils#is_webhook_authentic function
2.0.0 October 13, 2016
- Add accounts resource
- Add breaking changes to pybutton.Response class
1.1.0 October 4, 2016
- Add config options: hostname, port, secure, timeout
1.0.2 August 11, 2016
- Initial Release
| {
"content_hash": "0950d2817a81a4a72ec76b46d09bebad",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 74,
"avg_line_length": 23.354166666666668,
"alnum_prop": 0.7100802854594113,
"repo_name": "button/button-client-python",
"id": "5c615d2373248581978761d22bfebeb3d82c6088",
"size": "1121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CHANGELOG.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61683"
}
],
"symlink_target": ""
} |
import PropTypes from 'prop-types'
import React from 'react'
import propTypes from '../../prop-types'
import buildClassName from '../../lib/class-names'
const PageLayout = ({
classNames,
children,
footer,
minorBackground = false,
stickyFooter = false
}) => (
<div className={buildClassName('PageLayout', {minorBackground, stickyFooter}, classNames)}>
<main className="PageLayout__main">
<div className="PageLayout__content">{children}</div>
{footer && <div className="PageLayout__footer">{footer}</div>}
</main>
</div>
)
PageLayout.propTypes = {
...propTypes.component,
children: PropTypes.node,
footer: PropTypes.node,
stickyFooter: PropTypes.bool,
minorBackground: PropTypes.bool
}
export default PageLayout
| {
"content_hash": "829d15c50b8cd8c3f517ce982e953394",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 93,
"avg_line_length": 25.266666666666666,
"alnum_prop": 0.7018469656992085,
"repo_name": "all3dp/printing-engine-client",
"id": "dee21c8dde9d6b8f3b239e0615e1c01c0bffb110",
"size": "758",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/app/component/page-layout/page-layout.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "101386"
},
{
"name": "HTML",
"bytes": "3839"
},
{
"name": "JavaScript",
"bytes": "655542"
},
{
"name": "Perl",
"bytes": "811"
},
{
"name": "Perl 6",
"bytes": "1338"
},
{
"name": "Shell",
"bytes": "429"
},
{
"name": "TypeScript",
"bytes": "159344"
}
],
"symlink_target": ""
} |
from __future__ import division, print_function
import numpy as np
from scipy import linalg as la
from .point_cloud import PointCloud
try:
import mklfft as fft
except ImportError:
import numpy.fft as fft
def tdoa_loc(R, tdoa, c, x0=None):
    """
    TDOA based localization.

    Solves the linearized TDOA equations (Steven Li's formulation) in the
    least-squares sense to recover the source location. For exact,
    noiseless TDOA measurements the linear system is consistent and the
    true location is recovered exactly (at least 5 sensors in general
    position are required for a unique 3D solution).

    Parameters
    ----------
    R : ndarray
        A 3xN array of 3D sensor locations
    tdoa : ndarray
        A length N array of TDOA measurements (seconds); only the
        differences relative to the first sensor are used
    c : float
        The speed of sound
    x0 : ndarray, optional
        Unused; kept for backward compatibility with callers that pass an
        initial guess (it was only consumed by a removed iterative solver).

    Returns
    -------
    ndarray
        The estimated 3D source location.

    Reference
    ---------
    Steven Li, TDOA localization
    """
    # Work with delays relative to the reference sensor (index 0).
    tau = tdoa - tdoa[0]

    # Drop sensors with a zero relative delay (they would cause a division
    # by zero below), but always keep the reference sensor itself.
    keep = tau != 0.0
    keep[0] = True
    tau = tau[keep]
    R = R[:, keep]

    # Two reference points are needed to eliminate the unknown absolute
    # range from the linearized range-difference equations.
    r0 = R[:, 0:1]
    r1 = R[:, 1:2]
    rm = R[:, 2:]

    n0 = la.norm(r0) ** 2
    n1 = la.norm(r1) ** 2
    nm = la.norm(rm, axis=0) ** 2

    # Linear system obtained by differencing the squared range equations of
    # sensor m against those of sensors 0 and 1.
    ABC = 2 * (rm - r0) / (c * tau[2:]) - 2 * (r1 - r0) / (c * tau[1])
    D = c * tau[1] - c * tau[2:] + (nm - n0) / (c * tau[2:]) - (n1 - n0) / (c * tau[1])

    return la.lstsq(ABC.T, D)[0]
def tdoa(x1, x2, interp=1, fs=1, phat=True):
    """
    This function computes the time difference of arrival (TDOA)
    of the signal at the two microphones. This in turns is used to infer
    the direction of arrival (DOA) of the signal.

    Specifically if s(k) is the signal at the reference microphone and
    s_2(k) at the second microphone, then for signal arriving with DOA
    theta we have

    s_2(k) = s(k - tau)

    with

    tau = fs*d*sin(theta)/c

    where d is the distance between the two microphones and c the speed of sound.

    We recover tau using the Generalized Cross Correlation - Phase Transform (GCC-PHAT)
    method. The reference is

    Knapp, C., & Carter, G. C. (1976). The generalized correlation method for estimation of time delay.

    Parameters
    ----------
    x1 : nd-array
        The signal of the reference microphone
    x2 : nd-array
        The signal of the second microphone
    interp : int, optional (default 1)
        The interpolation value for the cross-correlation, it can
        improve the time resolution (and hence DOA resolution)
    fs : int, optional (default 1)
        The sampling frequency of the input signal
    phat : bool, optional (default True)
        Whether to apply the PHAT weighting (normalize the cross-spectrum
        magnitude, keeping phase only)

    Return
    ------
    float
        The delay between the two microphones (in seconds)
    """
    # zero padded length for the FFT (full linear cross-correlation length,
    # rounded up to an even number)
    n = x1.shape[0] + x2.shape[0] - 1
    if n % 2 != 0:
        n += 1

    # Generalized Cross Correlation Phase Transform:
    # cross-correlate via the frequency domain
    X1 = fft.rfft(np.array(x1, dtype=np.float32), n=n)
    X2 = fft.rfft(np.array(x2, dtype=np.float32), n=n)

    if phat:
        # PHAT weighting: discard magnitude, keep only phase information
        X1 /= np.abs(X1)
        X2 /= np.abs(X2)

    # interp * n output points give sub-sample resolution of the peak
    cc = fft.irfft(X1 * np.conj(X2), n=interp * n)

    # maximum possible delay given distance between microphones
    t_max = n // 2 + 1

    # reorder the cross-correlation coefficients so zero delay is centered
    cc = np.concatenate((cc[-t_max:], cc[:t_max]))

    # pick max cross correlation index as delay
    tau = np.argmax(np.abs(cc))
    # NOTE(review): pwr (peak magnitude) is computed but never returned —
    # confirm whether callers were meant to receive it.
    pwr = np.abs(cc[tau])

    tau -= t_max  # because zero time is at the center of the array

    return tau / (fs * interp)
def edm_line_search(R, tdoa, bounds, steps):
    """
    We have a number of points of known locations and have the TDOA measurements
    from an unknown location to the known points.
    We perform an EDM line search to find the unknown offset to turn TDOA to TOA.

    Parameters
    ----------
    R : ndarray
        An ndarray of 3xN where each column is the location of a point
    tdoa : ndarray
        A length N vector containing the tdoa measurements from unknown location to known ones
    bounds : ndarray
        Bounds (lower, upper) for the line search
    steps : int
        Number of steps (candidate offsets) for the line search

    Returns
    -------
    cost : ndarray
        For each candidate offset, the sum of the EDM eigenvalues that
        should vanish when the offset is correct
    d : ndarray
        The candidate offsets that were evaluated
    """
    # NOTE(review): dim is computed but never used.
    dim = R.shape[0]

    pc = PointCloud(X=R)

    # use point 0 as reference: shift so the smallest tdoa becomes zero
    dif = tdoa - tdoa.min()

    # initialize EDM: known points in the top-left block, the unknown point
    # occupies the last row/column
    D = np.zeros((pc.m + 1, pc.m + 1))
    D[:-1, :-1] = pc.EDM()

    # distance offset to search
    d = np.linspace(bounds[0], bounds[1], steps)

    # sum of eigenvalues that should be zero
    # cost = np.zeros((d.shape[0], D.shape[0]))
    cost = np.zeros(*d.shape)

    for i in range(d.shape[0]):
        # fill in the squared distances from the unknown point for this
        # candidate offset
        D[-1, :-1] = D[:-1, -1] = (dif + d[i]) ** 2

        # a valid EDM of points in 3D has rank at most 5; sum the smallest
        # eigenvalues (which should vanish at the correct offset)
        w = np.sort(np.abs(la.eigh(D, eigvals_only=True)))
        # w = la.eigh(D, eigvals_only=True, eigvals=(D.shape[0]-6,D.shape[0]-6))
        cost[i] = np.sum(w[: D.shape[0] - 5])

    return cost, d
| {
"content_hash": "f1ea7ef47f515aab822a8030b4814e6c",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 108,
"avg_line_length": 26.183098591549296,
"alnum_prop": 0.5752196521427291,
"repo_name": "LCAV/pyroomacoustics",
"id": "73cd63b96c21651ae7b02457c937db5b637171cc",
"size": "5577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyroomacoustics/experimental/localization.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "96552"
},
{
"name": "Cython",
"bytes": "2700"
},
{
"name": "Dockerfile",
"bytes": "735"
},
{
"name": "Python",
"bytes": "941773"
}
],
"symlink_target": ""
} |
class TheanoConfig(object):
    """Minimal stand-in for Theano's global configuration object."""

    # Default floating point precision used across the framework.
    floatX = 'float32'


# Module-level singleton mirroring ``theano.config``.
config = TheanoConfig()
"content_hash": "656a87f758c8d81b280591d53ae07f0f",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 27,
"avg_line_length": 18.75,
"alnum_prop": 0.72,
"repo_name": "neopenx/Dragon",
"id": "71609ddbab97df750bff315fc7a3a62473775734",
"size": "264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Dragon/python/dragon/vm/theano/configdefaults.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7082"
},
{
"name": "C++",
"bytes": "1024612"
},
{
"name": "CMake",
"bytes": "7849"
},
{
"name": "Cuda",
"bytes": "246400"
},
{
"name": "Makefile",
"bytes": "7409"
},
{
"name": "Python",
"bytes": "552459"
}
],
"symlink_target": ""
} |
app.controller("cicloController", function($scope, cicloService, $window) {
$scope.np_list = [];
list = function() {
cicloService.list().then(function(r) {
$scope.np_list = r.data;
}, function(error) {
console.log("Error " + error.data.message);
})
}
list();
$scope.sel = function(d) {
$scope.ciclo = d;
};
$scope.save = function() {
if ($scope.ciclo.id) {
cicloService.update({ id: "" }, $scope.ciclo).then(function(r) {
console.log(r.data);
list();
}, function(error) {
console.log("Error " + error.data.message);
});
} else {
cicloService.create($scope.ciclo).then(function(r) {
console.log(r.data);
list();
}, function(error) {
console.log("Error " + error.data.message);
});
};
};
$scope.delete = function(d){
if ($window.confirm('Confirm delete')) {
cicloService.delete({ "id": d.id }).then(function (r) {
console.log(r.data);
list();
}, function (error) {
console.log(error.data.message);
});
};
};
}); | {
"content_hash": "a5e9ea90326166c789a94e157962dffc",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 76,
"avg_line_length": 27.78723404255319,
"alnum_prop": 0.45176110260336905,
"repo_name": "lp2dev/HOMEND",
"id": "e327ec28313be008da939b1031e6e8f0b45e195b",
"size": "1306",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homend_web/app/controllers/cicloController.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "1356"
},
{
"name": "CSS",
"bytes": "49005"
},
{
"name": "HTML",
"bytes": "57982"
},
{
"name": "JavaScript",
"bytes": "71039"
},
{
"name": "Python",
"bytes": "40291"
}
],
"symlink_target": ""
} |
Simple and reliable support for [mocha](https://github.com/visionmedia/mocha) testing with Appcelerator's [Titanium](http://www.appcelerator.com/titanium/) SDK.
## Full Documentation and Samples
[http://tonylukasavage.com/ti-mocha/](http://tonylukasavage.com/ti-mocha/)
## Quick Start [![NPM version](https://badge.fury.io/js/ti-mocha.png)](http://badge.fury.io/js/ti-mocha)
```
cd /path/to/Titanium/project && npm install ti-mocha --prefix ./node_modules
```
## Contributing [![Build Status](https://travis-ci.org/tonylukasavage/ti-mocha.png?branch=master)](https://travis-ci.org/tonylukasavage/ti-mocha) [![Built with Grunt](https://cdn.gruntjs.com/builtwith.png)](http://gruntjs.com/)
1. Install [node.js](http://nodejs.org/).
2. Install [grunt](http://gruntjs.com/): `[sudo] npm install -g grunt-cli`
3. `git clone https://github.com/tonylukasavage/ti-mocha.git && cd ti-mocha && npm install`
#### Basic Build
```
grunt
```
This process will generate a new `./ti-mocha.js` file based on the files in `src`, as well as the source mocha.js file found at `./node_modules/mocha/mocha.js` after you execute `npm install`. See [lib/build.js](lib/build.js) for details of the build process. Please note that _no_ modifications are made directly to the source mocha.js file.
#### Build for Different Version of Mocha
1. Change version of mocha in the package.json `devDependencies`
2. `npm install`
3. `grunt`
## Issues
Please report issues, new features/reporters, or requests in this repo's [issue tracker](https://github.com/tonylukasavage/ti-mocha/issues). Bear in mind that this is a straight-up, minimal porting effort to make mocha work with Titanium. If you want additional features or functionality in mocha itself, please report them in the [mocha](https://github.com/visionmedia/mocha) repository.
## License
Distributed under [MIT License](LICENSE).
| {
"content_hash": "faea7e493b5c99577748eb5a652b0582",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 388,
"avg_line_length": 48.02564102564103,
"alnum_prop": 0.7383876134543513,
"repo_name": "CodexLabs/ti-mocha",
"id": "1a9e8e1c6162b0c266b55e2fb139268166e777de",
"size": "2020",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "176158"
}
],
"symlink_target": ""
} |
package eu.seaclouds.platform.planner.optimizer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Container that maps each module of an application topology to the cloud
 * offers that can host it. Three index-aligned lists are kept: the module
 * names, the names of the suitable offers per module, and the full
 * characteristics ({@link CloudOffer}) of those offers. It also stores the
 * network latencies the optimizer uses for inter-cloud and intra-datacenter
 * communication.
 */
public class SuitableOptions {

   static Logger log = LoggerFactory.getLogger(SuitableOptions.class);

   // Performance values are doubles; the comparators below scale them by this
   // factor and truncate to int, so differences smaller than
   // 1/COMPARATOR_LIMIT are treated as equal when sorting.
   private static final double COMPARATOR_LIMIT = 1000.0;

   // Index-aligned lists: position i of each list refers to the same module.
   private ArrayList<String> moduleNames;
   private ArrayList<List<String>> suitableOptionsNames;
   private ArrayList<List<CloudOffer>> suitableOptionsCharacteristics;

   // Latencies in milliseconds.
   private double latencyInternetMillis;
   private double latencyDatacenterMillis;

   /** Creates an empty container, with no modules registered. */
   public SuitableOptions() {
      moduleNames = new ArrayList<String>();
      suitableOptionsNames = new ArrayList<List<String>>();
      suitableOptionsCharacteristics = new ArrayList<List<CloudOffer>>();
   }

   /**
    * Registers a module together with its suitable cloud offers.
    *
    * @param moduleName name of the module
    * @param optionsNames names of the suitable offers for the module
    * @param optionsCharacteristics characteristics of those offers, expected
    *           to be index-aligned with {@code optionsNames}
    */
   public void addSuitableOptions(String moduleName, List<String> optionsNames,
         List<CloudOffer> optionsCharacteristics) {
      moduleNames.add(moduleName);
      suitableOptionsNames.add(optionsNames);
      suitableOptionsCharacteristics.add(optionsCharacteristics);
   }

   /**
    * @param moduleName name of the module (matched ignoring case)
    * @return number of suitable options registered for the module, or -1 if
    *         the module is unknown
    */
   public int getSizeOfSuitableOptions(String moduleName) {
      int i = 0;
      boolean found = false;
      while ((i < moduleNames.size()) && (!found)) {
         if (moduleNames.get(i).equalsIgnoreCase(moduleName)) {
            found = true;
         } else {
            i++;
         }
      }
      if (found) {
         return suitableOptionsNames.get(i).size();
      }
      // Fixed: used to print to System.out (with a wrong method name in the
      // message) instead of using the class logger.
      log.warn("getSizeOfSuitableOptions: module '{}' not found, returning -1",
            moduleName);
      return -1;
   }

   /**
    * @return a deep copy: offer characteristics are cloned, the option-name
    *         lists are copied, and both latencies are carried over.
    */
   @Override
   public SuitableOptions clone() {
      SuitableOptions cloned = new SuitableOptions();
      // Copy module names (String is immutable, sharing is safe).
      for (String moduleName : moduleNames) {
         cloned.moduleNames.add(moduleName);
      }
      // Copy the per-module lists of option names.
      for (List<String> l : suitableOptionsNames) {
         List<String> clonedList = new ArrayList<String>();
         for (String option : l) {
            clonedList.add(option);
         }
         cloned.suitableOptionsNames.add(clonedList);
      }
      // Deep-clone the per-module lists of offer characteristics.
      for (List<CloudOffer> l : suitableOptionsCharacteristics) {
         List<CloudOffer> clonedList2 = new ArrayList<CloudOffer>();
         for (CloudOffer option : l) {
            clonedList2.add(option.clone());
         }
         cloned.suitableOptionsCharacteristics.add(clonedList2);
      }
      cloned.setLatencyInternetMillis(latencyInternetMillis);
      cloned.setLatencyDatacenterMillis(latencyDatacenterMillis);
      return cloned;
   }

   /**
    * @param moduleName name of the module (matched ignoring case)
    * @param optionPosition index of the desired option within the module's
    *           option list
    * @return the option name at {@code optionPosition} for the module, or
    *         null if the module is unknown or has no options
    */
   public String getIthSuitableOptionForModuleName(String moduleName,
         int optionPosition) {
      int i = 0;
      boolean found = false;
      while ((i < moduleNames.size()) && (!found)) {
         if (moduleNames.get(i).equalsIgnoreCase(moduleName)) {
            found = true;
         } else {
            i++;
         }
      }
      if (found) {
         // Module found; return the option only if any exist for it.
         if (suitableOptionsNames.get(i).size() > 0) {
            return suitableOptionsNames.get(i).get(optionPosition);
         } else {
            return null;
         }
      }
      // Fixed: used to print to System.out instead of using the logger.
      log.warn("getIthSuitableOptionForModuleName: module '{}' not found, "
            + "returning null", moduleName);
      return null;
   }

   // ITERATORS OVER THE ELEMENTS

   /**
    * Base for the iterators below: walks the index-aligned lists in module
    * registration order. Note it is both Iterable and Iterator at once, so
    * each instance can be traversed only a single time.
    */
   abstract class AbstractIterator<T> implements Iterable<T>, Iterator<T> {

      int currentIndex = 0;

      @Override
      public boolean hasNext() {
         return currentIndex < moduleNames.size();
      }

      /** Removal is not supported; intentionally a no-op. */
      @Override
      public void remove() {
      }

      @Override
      public Iterator<T> iterator() {
         return this;
      }
   }

   /** Iterates the per-module lists of suitable option names. */
   class ListIterator extends AbstractIterator<List<String>> {
      @Override
      public List<String> next() {
         List<String> currentList = suitableOptionsNames.get(currentIndex);
         currentIndex++;
         return currentList;
      }
   }

   /** Iterates the module names. */
   class StringIterator extends AbstractIterator<String> {
      @Override
      public String next() {
         String currentModName = moduleNames.get(currentIndex);
         currentIndex++;
         return currentModName;
      }
   }

   /** @return a one-shot iterable over the per-module option-name lists */
   public Iterable<List<String>> getListIterator() {
      return new ListIterator();
   }

   /** @return a one-shot iterable over the module names */
   public Iterable<String> getStringIterator() {
      return new StringIterator();
   }

   /**
    * @param moduleName name of the module (matched case-sensitively)
    * @param cloudOptionNameForModule name of one of its suitable offers
    * @return the characteristics of offer {@code cloudOptionNameForModule}
    *         for module {@code moduleName}
    */
   public CloudOffer getCloudCharacteristics(String moduleName,
         String cloudOptionNameForModule) {
      // Find index of moduleName.
      boolean found = false;
      int indexModule = 0;
      while ((indexModule < moduleNames.size()) && (!found)) {
         if (moduleNames.get(indexModule).equals(moduleName)) {
            found = true;
         } else {
            indexModule++;
         }
      }
      // TODO: Merge this previous part with a getCloudOffersForModule() method.
      // If it is not found there will probably be an error later (the get()
      // below goes out of bounds). We cannot repair the situation here, just
      // warn about it.
      if (!found) {
         log.warn("Module in topology named '{}' has not been found as module "
               + "with some suitable options. ERROR ahead!", moduleName);
      }
      found = false;
      int indexCloudOffer = 0;
      while ((indexCloudOffer < suitableOptionsCharacteristics.get(indexModule)
            .size()) && (!found)) {
         if (suitableOptionsCharacteristics.get(indexModule)
               .get(indexCloudOffer).getName().equals(cloudOptionNameForModule)) {
            found = true;
         } else {
            indexCloudOffer++;
         }
      }
      if (!found) {
         log.warn("Chosen cloud option (i.e., {}) in solution has not been "
               + "found as possibility for the suitable options for the "
               + "module ({}). ERROR ahead!", cloudOptionNameForModule,
               moduleName);
      }
      return suitableOptionsCharacteristics.get(indexModule).get(
            indexCloudOffer);
   }

   /** Sorts, for every module, its offers by descending performance. */
   public void sortDescendingPerformance() {
      for (int i = 0; i < suitableOptionsCharacteristics.size(); i++) {
         sortDescendingPerformanceOptionsForIthModule(i);
      }
   }

   /**
    * Sorts the offers of the listIndex-th module by descending performance
    * and realigns the parallel list of option names to the new order.
    */
   private void sortDescendingPerformanceOptionsForIthModule(int listIndex) {
      List<CloudOffer> listToSort = suitableOptionsCharacteristics
            .get(listIndex);
      Collections.sort(listToSort,
            new CloudOptionReversePerformanceComparator());
      // Replace the list of names so it follows the same order as
      // suitableOptionsCharacteristics.
      for (int i = 0; i < listToSort.size(); i++) {
         suitableOptionsNames.get(listIndex)
               .set(i, listToSort.get(i).getName());
      }
   }

   // ///////////////////////////
   // CLASSES FOR COMPARATOR
   // ///////////////////////////

   /**
    * Orders offers by ascending performance. Scaling by COMPARATOR_LIMIT and
    * truncating to int means performances closer than 1/COMPARATOR_LIMIT
    * compare as equal.
    */
   class CloudOptionPerformanceComparator implements Comparator<CloudOffer> {
      @Override
      public int compare(CloudOffer o1, CloudOffer o2) {
         return (int) ((o1.getPerformance() * COMPARATOR_LIMIT) - (o2
               .getPerformance() * COMPARATOR_LIMIT));
      }
   }

   /** Orders offers by descending performance (same tolerance as above). */
   class CloudOptionReversePerformanceComparator implements
         Comparator<CloudOffer> {
      @Override
      public int compare(CloudOffer o1, CloudOffer o2) {
         return (int) ((o2.getPerformance() * COMPARATOR_LIMIT) - (o1
               .getPerformance() * COMPARATOR_LIMIT));
      }
   }

   // ///////////////////////////
   // END OF CLASSES FOR COMPARATOR
   // ///////////////////////////

   /**
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return whether the same provider has an offer with worse performance
    *         than {@code cloudOffer}
    */
   public boolean existsOfferWithWorsePerformanceOfSameProvider(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      for (CloudOffer offer : offers) {
         // Series of conditions in AND, nested for readability.
         if (offer.getPerformance() < currentOffer.getPerformance()) {
            if (offer.getProviderName().equals(currentOffer.getProviderName())) {
               return true;
            }
         }
      }
      return false;
   }

   /**
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return the offer of the same provider whose performance is immediately
    *         below {@code cloudOffer}'s, or null if none exists. Assumes the
    *         offer list is sorted by
    *         {@link CloudOptionReversePerformanceComparator} (descending
    *         performance).
    */
   public CloudOffer getOfferImmediateLowerPerformanceOfSameProvider(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      for (CloudOffer offer : offers) { // assumed descending performance order
         if (offer.getPerformance() < currentOffer.getPerformance()) {
            if (offer.getProviderName().equals(currentOffer.getProviderName())) {
               // First match in an ordered traverse is the immediate one.
               return offer;
            }
         }
      }
      return null;
   }

   /**
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return whether the same provider has an offer with better performance
    *         than {@code cloudOffer}
    */
   public boolean existsOfferWithBetterPerformanceOfSameProvider(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      for (CloudOffer offer : offers) {
         // Series of conditions in AND, nested for readability.
         if (offer.getPerformance() > currentOffer.getPerformance()) {
            if (offer.getProviderName().equals(currentOffer.getProviderName())) {
               return true;
            }
         }
      }
      return false;
   }

   /**
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return the offer of the same provider whose performance is immediately
    *         above {@code cloudOffer}'s, or null if none exists. Assumes the
    *         offer list is sorted by
    *         {@link CloudOptionReversePerformanceComparator} (descending
    *         performance), so the last better offer seen before performance
    *         drops is the immediate one.
    */
   public CloudOffer getOfferImmediateHigherPerformanceOfSameProvider(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      CloudOffer potentialBetter = null;
      for (CloudOffer offer : offers) { // assumed descending performance order
         if (offer.getPerformance() > currentOffer.getPerformance()) {
            if (offer.getProviderName().equals(currentOffer.getProviderName())) {
               potentialBetter = offer;
            }
         } else { // not better, so (being ordered) the rest are not either.
            return potentialBetter;
         }
      }
      return potentialBetter;
   }

   /**
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return whether a *different* provider has an offer with higher
    *         availability than {@code cloudOffer}
    */
   public boolean existsAlternativeCloudProviderForModuleWithHigherAvailability(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      for (CloudOffer offer : offers) {
         // Series of conditions in AND, nested for readability.
         if (offer.getAvailability() > currentOffer.getAvailability()) {
            if (!(offer.getProviderName()
                  .equals(currentOffer.getProviderName()))) {
               return true;
            }
         }
      }
      return false;
   }

   /**
    * NOTE: despite the "SameProvider" in the name, this method searches
    * offers of a *different* provider (the provider-name equality is
    * negated below); kept for interface compatibility.
    *
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return among offers of other providers with higher availability, the
    *         one whose performance is closest to {@code cloudOffer}'s, or
    *         null if none exists
    */
   public CloudOffer getOfferImmediateHigherAvailabilityOfSameProviderSimilarPerformance(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      CloudOffer potentialBetter = null;
      for (CloudOffer offer : offers) {
         if (offer.getAvailability() > currentOffer.getAvailability()) {
            if (!(offer.getProviderName()
                  .equals(currentOffer.getProviderName()))) {
               if (potentialBetter == null) { // none found yet
                  potentialBetter = offer;
               } else { // keep the candidate with the closest performance
                  if (Math.abs(currentOffer.getPerformance()
                        - offer.getPerformance()) < (Math.abs(currentOffer
                        .getPerformance() - potentialBetter.getPerformance()))) {
                     potentialBetter = offer;
                  }
               }
            }
         }
      }
      return potentialBetter;
   }

   /**
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return whether a *different* provider has an offer with lower
    *         availability than {@code cloudOffer}
    */
   public boolean existsAlternativeCloudProviderForModuleWithLowerAvailability(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      for (CloudOffer offer : offers) {
         // Series of conditions in AND, nested for readability.
         if (offer.getAvailability() < currentOffer.getAvailability()) {
            if (!(offer.getProviderName()
                  .equals(currentOffer.getProviderName()))) {
               return true;
            }
         }
      }
      return false;
   }

   /**
    * NOTE: despite the "SameProvider" in the name, this method searches
    * offers of a *different* provider (mirror of the "Higher" variant above);
    * kept for interface compatibility.
    *
    * @param modulename name of the module
    * @param cloudOffer name of the current offer
    * @return among offers of other providers with lower availability, the one
    *         whose performance is closest to {@code cloudOffer}'s, or null if
    *         none exists
    */
   public CloudOffer getOfferImmediateLowerAvailabilityOfSameProviderSimilarPerformance(
         String modulename, String cloudOffer) {
      List<CloudOffer> offers = getCloudOffersForModule(modulename);
      CloudOffer currentOffer = getCloudCharacteristicsFromList(offers,
            cloudOffer);
      CloudOffer potentialWorse = null;
      for (CloudOffer offer : offers) {
         if (offer.getAvailability() < currentOffer.getAvailability()) {
            if (!(offer.getProviderName()
                  .equals(currentOffer.getProviderName()))) {
               if (potentialWorse == null) { // none found yet
                  potentialWorse = offer;
               } else { // keep the candidate with the closest performance
                  if (Math.abs(currentOffer.getPerformance()
                        - offer.getPerformance()) < (Math.abs(currentOffer
                        .getPerformance() - potentialWorse.getPerformance()))) {
                     potentialWorse = offer;
                  }
               }
            }
         }
      }
      return potentialWorse;
   }

   /**
    * @param offers list of offers to search (exact name match)
    * @param cloudOffer name of the wanted offer
    * @return the offer named {@code cloudOffer} within {@code offers}
    */
   private CloudOffer getCloudCharacteristicsFromList(List<CloudOffer> offers,
         String cloudOffer) {
      boolean found = false;
      int indexCloudOffer = 0;
      while ((indexCloudOffer < offers.size()) && (!found)) {
         if (offers.get(indexCloudOffer).getName().equals(cloudOffer)) {
            found = true;
         } else {
            indexCloudOffer++;
         }
      }
      // If not found, the get() below goes out of bounds; warn beforehand.
      if (!found) {
         log.warn("Chosen cloud option '{}' in solution has not been found as "
               + "possibility for the suitable options for the module. "
               + "ERROR ahead!", cloudOffer);
      }
      return offers.get(indexCloudOffer);
   }

   /**
    * @param modulename name of the module. NOTE: matched case-sensitively
    *           here, unlike getSizeOfSuitableOptions which ignores case —
    *           confirm which behavior is intended.
    * @return the list of offers for the module, or null if it is unknown
    */
   private List<CloudOffer> getCloudOffersForModule(String modulename) {
      // Find index of moduleName.
      boolean found = false;
      int indexModule = 0;
      while ((indexModule < moduleNames.size()) && (!found)) {
         if (moduleNames.get(indexModule).equals(modulename)) {
            found = true;
         } else {
            indexModule++;
         }
      }
      // If it is not found, there will probably be an error later (callers
      // dereference the returned list). We cannot repair it here, just warn.
      if (!found) {
         log.warn("Module '{}' in topology has not been found as module with "
               + "some suitable options. ERROR ahead!", modulename);
         return null;
      }
      return suitableOptionsCharacteristics.get(indexModule);
   }

   /** @return latency inside a single datacenter, in milliseconds */
   public double getLatencyIntraDatacenterMillis() {
      return latencyDatacenterMillis;
   }

   /** @return latency between clouds (over the Internet), in milliseconds */
   public double getLatencyInterCloudMillis() {
      return latencyInternetMillis;
   }

   /** @return latency inside a single datacenter, in seconds */
   public double getLatencyIntraDatacenterSec() {
      return getLatencyIntraDatacenterMillis() / 1000.0;
   }

   /** @return latency between clouds (over the Internet), in seconds */
   public double getLatencyInterCloudSec() {
      return getLatencyInterCloudMillis() / 1000.0;
   }

   /** Sets the inter-cloud (Internet) latency, in milliseconds. */
   public void setLatencyInternetMillis(double latencyInternetMillis) {
      this.latencyInternetMillis = latencyInternetMillis;
   }

   /** Sets the intra-datacenter latency, in milliseconds. */
   public void setLatencyDatacenterMillis(double latencyDatacenterMillis) {
      this.latencyDatacenterMillis = latencyDatacenterMillis;
   }
}
| {
"content_hash": "ba96ee7992a6ecc6a192a064e62d7c87",
"timestamp": "",
"source": "github",
"line_count": 556,
"max_line_length": 141,
"avg_line_length": 32.81294964028777,
"alnum_prop": 0.6140100855075641,
"repo_name": "rosogon/SeaCloudsPlatform",
"id": "969b0b83ec5e3d15b2d65b16b95f875aac5e0314",
"size": "18886",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "planner/optimizer/optimizer-core/src/main/java/eu/seaclouds/platform/planner/optimizer/SuitableOptions.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2589"
},
{
"name": "CSS",
"bytes": "7750"
},
{
"name": "HTML",
"bytes": "68152"
},
{
"name": "Java",
"bytes": "2241478"
},
{
"name": "JavaScript",
"bytes": "53011"
},
{
"name": "PHP",
"bytes": "905"
},
{
"name": "Python",
"bytes": "1493"
},
{
"name": "Ruby",
"bytes": "1292"
},
{
"name": "Shell",
"bytes": "23889"
}
],
"symlink_target": ""
} |
# Proxy for a redis sorted set ("zset") field: each method forwards to the
# corresponding zset command, marshalling Ruby values on the way in.
class Redis::Zset
  include Redis::FieldProxy #:nodoc:

  # Adds +value+ to the set with the given +score+.
  def add(value, score); redis.zset_add(key, score, marshal.to_redis(value)) end

  # Removes +value+ from the set.
  def remove(value); redis.zset_delete(key, marshal.to_redis(value)) end

  # Members with ranks between +start+ and +stop+, ascending by score.
  # BUGFIX: the original passed the undefined local `star` instead of
  # `start`, so every call raised NameError.
  def range(start = 0, stop = -1); redis.zset_range(key, start, stop) end

  # Members with ranks between +start+ and +stop+, descending by score.
  def reverse_range(start = 0, stop = -1); redis.zset_reverse_range(key, start, stop) end

  # Increments the score of +value+ by +amount+.
  def incr(value, amount = 1); redis.zset_increment_by(key, amount, marshal.to_redis(value)) end

  # Members whose score lies between +start+ and +stop+.
  def by_score(start, stop); redis.zset_range_by_score(key, start, stop) end

  # Score stored for +value+.
  def score(value); redis.zset_score(key, marshal.to_redis(value)) end

  # Number of members in the set.
  def count; redis.zset_count(key) end
  # BUGFIX: the original had `alias :count :size`, which is reversed and
  # fails with NameError because no `size` method exists; `size` is meant
  # to be the alias of `count`.
  alias :size :count

  # Comma-separated rendering of all members.
  def to_s; range.join(', ') end

  def get; self end

  # Loads the field from the members of +value+.
  def set(value)
    # BUGFIX: the original called redis.sadd(marshal.to_redis(item)) without
    # the key, which is the wrong arity for the command call.
    # NOTE(review): sadd is a plain-set command; for a zset this may really
    # want an add with a score — confirm intended semantics.
    value.each { |item| redis.sadd(key, marshal.to_redis(item)) }
  end

  protected

  # Maps public method names onto redis command names via COMMANDS.
  def translate_method_name(m); COMMANDS[m] end
end
"content_hash": "ed16fb9e86a10964579e561b9faad0a0",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 96,
"avg_line_length": 28.636363636363637,
"alnum_prop": 0.6486772486772486,
"repo_name": "BrianTheCoder/redis-types",
"id": "2df77df4c82c772b9c20a449d9f35dffbbc892b8",
"size": "945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/redis/zset.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "14828"
}
],
"symlink_target": ""
} |
/**
* \usergroup{SceSystemParam}
* \usage{psp2/system_param.h}
*/
#ifndef _PSP2_SYSTEM_PARAM_H_
#define _PSP2_SYSTEM_PARAM_H_
#ifdef __cplusplus
extern "C" {
#endif
/** System param id */
typedef enum SceSystemParamId {
	//! Language settings
	SCE_SYSTEM_PARAM_ID_LANG = 1,
	//! Enter button assignment
	SCE_SYSTEM_PARAM_ID_ENTER_BUTTON,
	//! Username string
	SCE_SYSTEM_PARAM_ID_USERNAME,
	//! Date format
	SCE_SYSTEM_PARAM_ID_DATE_FORMAT,
	//! Time format
	SCE_SYSTEM_PARAM_ID_TIME_FORMAT,
	//! Time zone
	SCE_SYSTEM_PARAM_ID_TIME_ZONE,
	//! Daylight savings time (0 = Disabled, 1 = Enabled)
	SCE_SYSTEM_PARAM_ID_DAYLIGHT_SAVINGS,
	//! Max allowed value
	SCE_SYSTEM_PARAM_ID_MAX_VALUE = 0xFFFFFFFF
} SceSystemParamId;
/** Language settings (values for ::SCE_SYSTEM_PARAM_ID_LANG) */
typedef enum SceSystemParamLang {
	//! Japanese
	SCE_SYSTEM_PARAM_LANG_JAPANESE,
	//! American English
	SCE_SYSTEM_PARAM_LANG_ENGLISH_US,
	//! French
	SCE_SYSTEM_PARAM_LANG_FRENCH,
	//! Spanish
	SCE_SYSTEM_PARAM_LANG_SPANISH,
	//! German
	SCE_SYSTEM_PARAM_LANG_GERMAN,
	//! Italian
	SCE_SYSTEM_PARAM_LANG_ITALIAN,
	//! Dutch
	SCE_SYSTEM_PARAM_LANG_DUTCH,
	//! Portugal Portuguese
	SCE_SYSTEM_PARAM_LANG_PORTUGUESE_PT,
	//! Russian
	SCE_SYSTEM_PARAM_LANG_RUSSIAN,
	//! Korean
	SCE_SYSTEM_PARAM_LANG_KOREAN,
	//! Traditional Chinese
	SCE_SYSTEM_PARAM_LANG_CHINESE_T,
	//! Simplified Chinese
	SCE_SYSTEM_PARAM_LANG_CHINESE_S,
	//! Finnish
	SCE_SYSTEM_PARAM_LANG_FINNISH,
	//! Swedish
	SCE_SYSTEM_PARAM_LANG_SWEDISH,
	//! Danish
	SCE_SYSTEM_PARAM_LANG_DANISH,
	//! Norwegian
	SCE_SYSTEM_PARAM_LANG_NORWEGIAN,
	//! Polish
	SCE_SYSTEM_PARAM_LANG_POLISH,
	//! Brazil Portuguese
	SCE_SYSTEM_PARAM_LANG_PORTUGUESE_BR,
	//! British English
	SCE_SYSTEM_PARAM_LANG_ENGLISH_GB,
	//! Turkish
	SCE_SYSTEM_PARAM_LANG_TURKISH,
	//! Max allowed value
	SCE_SYSTEM_PARAM_LANG_MAX_VALUE = 0xFFFFFFFF
} SceSystemParamLang;
/** Assignment of enter button (values for ::SCE_SYSTEM_PARAM_ID_ENTER_BUTTON) */
typedef enum SceSystemParamEnterButtonAssign {
	SCE_SYSTEM_PARAM_ENTER_BUTTON_CIRCLE,                 //!< Circle button acts as enter
	SCE_SYSTEM_PARAM_ENTER_BUTTON_CROSS,                  //!< Cross button acts as enter
	SCE_SYSTEM_PARAM_ENTER_BUTTON_MAX_VALUE = 0xFFFFFFFF  //!< Max allowed value
} SceSystemParamEnterButtonAssign;
/* Username (::SCE_SYSTEM_PARAM_ID_USERNAME) */
#define SCE_SYSTEM_PARAM_USERNAME_MAXSIZE (17) //!< Max size of username (presumably includes the terminating NUL — confirm)
/** Date display format (values for ::SCE_SYSTEM_PARAM_ID_DATE_FORMAT) */
typedef enum SceSystemParamDateFormat {
	SCE_SYSTEM_PARAM_DATE_FORMAT_YYYYMMDD, //!< Year/Month/Day
	SCE_SYSTEM_PARAM_DATE_FORMAT_DDMMYYYY, //!< Day/Month/Year
	SCE_SYSTEM_PARAM_DATE_FORMAT_MMDDYYYY  //!< Month/Day/Year
} SceSystemParamDateFormat;
/** Time display format (values for ::SCE_SYSTEM_PARAM_ID_TIME_FORMAT) */
typedef enum SceSystemParamTimeFormat {
	SCE_SYSTEM_PARAM_TIME_FORMAT_12HR, //!< 12-hour clock
	SCE_SYSTEM_PARAM_TIME_FORMAT_24HR  //!< 24-hour clock
} SceSystemParamTimeFormat;
#ifdef __cplusplus
}
#endif
#endif /* _PSP2_SYSTEM_PARAM_H_ */
| {
"content_hash": "5ef6ff5407688fe42e1f02d0f104f117",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 72,
"avg_line_length": 25.64485981308411,
"alnum_prop": 0.7270408163265306,
"repo_name": "vitasdk/vita-headers",
"id": "4cbbc7b1b8df6d75c04ac9bb4fb94b62d4b01d09",
"size": "2744",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "include/psp2/system_param.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "895738"
},
{
"name": "C++",
"bytes": "26074"
},
{
"name": "Python",
"bytes": "7775"
},
{
"name": "Shell",
"bytes": "2035"
}
],
"symlink_target": ""
} |
Title: SIRA December 2015 Webinar - Real World Reconnaisance Costs: A metric
Date: 2015-12-03 12:00:00
Category: News
Tags: webinar
Slug: reconnaisance-risk-webinar
Author: SIRA
<center>
SIRA Members are invited to attend our next 2015 webinar on<br/><br/>
<b>Real World Reconnaisance Costs: A metric</b><br/><br/>
<b>Friday, December 11, 2015 - 12:00EDT</b><br/><br/>
featuring Eireann Leverett<br/>
</center>
This talk is for you if:
- You measure what you can, and don't beg for data anymore.
- You are already interested in security economics.
- You want to support a hacker who is trying to quant better.
- You need a way to get vulnerability at scale across to policy people.
- You want a new tool in the toolbox, and you like finding use cases for new tools.
Webinar seating is *limited* and paid members get dibs if it gets full. The session will be recorded for future viewing by SIRA paid members. You can find out more about SIRA memberships over at the SIRA website.
Use the [following link](https://zoom.us/webinar/register/f7e91bfba41d5bd7dc2040ba88984b7b) to register. You will receive a confirmation after your SIRA membership has been verified.
<center><span style="font-size:9pt">Contact [webinars@societyinforisk.org](mailto:webinars@societyinforisk.org) with any questions/inquiries.</span></center>
<hr noshade size="1"/>
<b>Eireann Leverett</b> is a risk researcher at the University of Cambridge Centre for Risk Studies. He has studied psychology, philosophy, artificial intelligence, software engineering, and computer security at various times in his life. He holds a BEng from Edinburgh University and an MPhil from the University of Cambridge in Advanced Computer Science. He still enjoys punting at Darwin College when he has the time.
At the Centre for Risk Studies his research focuses upon technological disasters and the economic impacts of computer security failures or accidents. He has experience of compromising the security of organisations, and assisting them to improve their security postures through a variety of short and long term methods. He is interested in computer security at scale, security economics, systems security, incident response, critical infrastructure protection, safety, firmware signing, exploit markets, vulnerability management, quality assurance, indicators of compromise, modelling, networks, risk, visualisations, and zero knowledge proofs. He is a frequent public speaker on these subjects. | {
"content_hash": "f9821bd7c48f20bd67400a468cce837e",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 694,
"avg_line_length": 74.66666666666667,
"alnum_prop": 0.7954545454545454,
"repo_name": "societyinforisk/blog",
"id": "f2a082147345216d1f2f650518e84462312d85b6",
"size": "2464",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "content/blog/2015-12-webinar.md",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_131) on Mon Nov 06 19:55:12 GMT 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>QMazeGrid</title>
<meta name="date" content="2017-11-06">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="QMazeGrid";
}
}
catch(err) {
}
//-->
var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/QMazeGrid.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li><a href="../../../qmaze/View/MazeComponents/QMazeRoom.html" title="class in qmaze.View.MazeComponents"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?qmaze/View/MazeComponents/QMazeGrid.html" target="_top">Frames</a></li>
<li><a href="QMazeGrid.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#fields.inherited.from.class.qmaze.View.Components.Component">Field</a> | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">qmaze.View.MazeComponents</div>
<h2 title="Class QMazeGrid" class="title">Class QMazeGrid</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li><a href="../../../qmaze/View/Components/Component.html" title="class in qmaze.View.Components">qmaze.View.Components.Component</a></li>
<li>
<ul class="inheritance">
<li>qmaze.View.MazeComponents.QMazeGrid</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<br>
<pre>public class <span class="typeNameLabel">QMazeGrid</span>
extends <a href="../../../qmaze/View/Components/Component.html" title="class in qmaze.View.Components">Component</a></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- =========== FIELD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="field.summary">
<!-- -->
</a>
<h3>Field Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="fields.inherited.from.class.qmaze.View.Components.Component">
<!-- -->
</a>
<h3>Fields inherited from class qmaze.View.Components.<a href="../../../qmaze/View/Components/Component.html" title="class in qmaze.View.Components">Component</a></h3>
<code><a href="../../../qmaze/View/Components/Component.html#ADJUST_MAZE_STATE">ADJUST_MAZE_STATE</a>, <a href="../../../qmaze/View/Components/Component.html#ADJUST_PARAM_STATE">ADJUST_PARAM_STATE</a>, <a href="../../../qmaze/View/Components/Component.html#assets">assets</a>, <a href="../../../qmaze/View/Components/Component.html#controller">controller</a>, <a href="../../../qmaze/View/Components/Component.html#RESET_STATE">RESET_STATE</a>, <a href="../../../qmaze/View/Components/Component.html#TRAINED_STATE">TRAINED_STATE</a></code></li>
</ul>
</li>
</ul>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#QMazeGrid-qmaze.View.ViewController-">QMazeGrid</a></span>(<a href="../../../qmaze/View/ViewController.html" title="class in qmaze.View">ViewController</a> controller)</code> </td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#animateMap-java.util.ArrayList-">animateMap</a></span>(java.util.ArrayList<<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a>> optimalPath)</code> </td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code>javafx.scene.layout.Pane</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#build--">build</a></span>()</code> </td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code><a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#getAgentLocation--">getAgentLocation</a></span>()</code> </td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#getColumns--">getColumns</a></span>()</code> </td>
</tr>
<tr id="i4" class="altColor">
<td class="colFirst"><code><a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#getGoalState--">getGoalState</a></span>()</code> </td>
</tr>
<tr id="i5" class="rowColor">
<td class="colFirst"><code>java.util.ArrayList<<a href="../../../qmaze/View/MazeComponents/QMazeRoom.html" title="class in qmaze.View.MazeComponents">QMazeRoom</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#getRooms--">getRooms</a></span>()</code>
<div class="block">Getters/setters</div>
</td>
</tr>
<tr id="i6" class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#getRows--">getRows</a></span>()</code> </td>
</tr>
<tr id="i7" class="rowColor">
<td class="colFirst"><code><a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#getStartingState--">getStartingState</a></span>()</code> </td>
</tr>
<tr id="i8" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#reset--">reset</a></span>()</code> </td>
</tr>
<tr id="i9" class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#setGoalState-qmaze.Environment.Coordinates-">setGoalState</a></span>(<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a> goalState)</code> </td>
</tr>
<tr id="i10" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#setStartingState-qmaze.Environment.Coordinates-">setStartingState</a></span>(<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a> startingState)</code> </td>
</tr>
<tr id="i11" class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../qmaze/View/MazeComponents/QMazeGrid.html#showVisitCount-java.util.HashMap-">showVisitCount</a></span>(java.util.HashMap<<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a>,java.lang.Integer> heatMap)</code>
<div class="block">Animation/heatmap stuff</div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="QMazeGrid-qmaze.View.ViewController-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>QMazeGrid</h4>
<pre>public QMazeGrid(<a href="../../../qmaze/View/ViewController.html" title="class in qmaze.View">ViewController</a> controller)</pre>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="reset--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>reset</h4>
<pre>public void reset()</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../qmaze/View/Components/Component.html#reset--">reset</a></code> in class <code><a href="../../../qmaze/View/Components/Component.html" title="class in qmaze.View.Components">Component</a></code></dd>
</dl>
</li>
</ul>
<a name="build--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>build</h4>
<pre>public javafx.scene.layout.Pane build()</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../qmaze/View/Components/Component.html#build--">build</a></code> in class <code><a href="../../../qmaze/View/Components/Component.html" title="class in qmaze.View.Components">Component</a></code></dd>
</dl>
</li>
</ul>
<a name="showVisitCount-java.util.HashMap-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>showVisitCount</h4>
<pre>public void showVisitCount(java.util.HashMap<<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a>,java.lang.Integer> heatMap)</pre>
<div class="block">Animation/heatmap stuff</div>
</li>
</ul>
<a name="animateMap-java.util.ArrayList-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>animateMap</h4>
<pre>public void animateMap(java.util.ArrayList<<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a>> optimalPath)</pre>
</li>
</ul>
<a name="getRooms--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getRooms</h4>
<pre>public java.util.ArrayList<<a href="../../../qmaze/View/MazeComponents/QMazeRoom.html" title="class in qmaze.View.MazeComponents">QMazeRoom</a>> getRooms()</pre>
<div class="block">Getters/setters</div>
</li>
</ul>
<a name="getRows--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getRows</h4>
<pre>public int getRows()</pre>
</li>
</ul>
<a name="getColumns--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getColumns</h4>
<pre>public int getColumns()</pre>
</li>
</ul>
<a name="getStartingState--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getStartingState</h4>
<pre>public <a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a> getStartingState()</pre>
</li>
</ul>
<a name="setStartingState-qmaze.Environment.Coordinates-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setStartingState</h4>
<pre>public void setStartingState(<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a> startingState)</pre>
</li>
</ul>
<a name="getGoalState--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getGoalState</h4>
<pre>public <a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a> getGoalState()</pre>
</li>
</ul>
<a name="setGoalState-qmaze.Environment.Coordinates-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setGoalState</h4>
<pre>public void setGoalState(<a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a> goalState)</pre>
</li>
</ul>
<a name="getAgentLocation--">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>getAgentLocation</h4>
<pre>public <a href="../../../qmaze/Environment/Coordinates.html" title="class in qmaze.Environment">Coordinates</a> getAgentLocation()</pre>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/QMazeGrid.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li><a href="../../../qmaze/View/MazeComponents/QMazeRoom.html" title="class in qmaze.View.MazeComponents"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?qmaze/View/MazeComponents/QMazeGrid.html" target="_top">Frames</a></li>
<li><a href="QMazeGrid.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#fields.inherited.from.class.qmaze.View.Components.Component">Field</a> | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "c563469d32275291712df115cbf7353c",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 544,
"avg_line_length": 40.55233853006681,
"alnum_prop": 0.6592706502636204,
"repo_name": "katharinebeaumont/QMaze",
"id": "9c65e0a981516f22f120cb05377a6c4b4e4d120d",
"size": "18208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/javadoc/qmaze/View/MazeComponents/QMazeGrid.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3080"
},
{
"name": "Java",
"bytes": "78860"
},
{
"name": "JavaScript",
"bytes": "154917"
}
],
"symlink_target": ""
} |
package info.opencards.learnstrats.ltm;
import info.opencards.core.CardFile;
import info.opencards.core.Item;
import info.opencards.core.ItemValuater;
import info.opencards.core.LearnMethodFactory;
import java.util.*;
/**
 * A process-manager that refreshes already-learnt items: it removes items that
 * have not been seen before (new items) and schedules the remaining ones,
 * ordering card-files by the urgency of their long-term-memory items.
 *
 * @author Holger Brandl
 */
public class RefreshProcessManager extends LTMProcessManager {

    public RefreshProcessManager(ItemValuater itemValuater, LearnMethodFactory factory) {
        super(itemValuater, factory);
    }

    public void setupSchedule(Collection<CardFile> curFiles) {
        scheduler.clear();

        // reorder files based on urgentness so the most urgent ones come first
        List<CardFile> presortFiles = new ArrayList<CardFile>(curFiles);
        Collections.sort(presortFiles, new Comparator<CardFile>() {

            public int compare(CardFile o1, CardFile o2) {
                // NOTE(review): casting the urgency difference to int can
                // overflow/truncate depending on getUrgency()'s range - confirm
                return (int) (ScheduleUtils.getUrgency(o1.getFlashCards().getLTMItems()) -
                        ScheduleUtils.getUrgency(o2.getFlashCards().getLTMItems()));
            }
        });

        for (CardFile presortFile : presortFiles) {
            // typed copy (was a raw ArrayList, which caused an unchecked warning)
            ArrayList<Item> fileItems = new ArrayList<Item>(presortFile.getFlashCards().getLTMItems());

            // remove new items (because this is a refreshing scheduler)
            fileItems.removeAll(ScheduleUtils.getNewItems(fileItems));

            numScheduled += fileItems.size();
            scheduler.put(presortFile, fileItems);
        }

        procIt = scheduler.keySet().iterator();
    }

    public void itemChanged(Item item, boolean stillOnSchedule, Integer feedback) {
        LTMItem ltmItem = (LTMItem) item;

        if (ltmItem.isScheduledForToday() && stillOnSchedule)
            return;

        // reduce the number of reviews by one to keep the spacing-model in place for numIt>2
        // note: we don't revert the e-factor here but this shouldn't worsen things too much
        if (ltmItem.getNumRepetition() > 2)
            ltmItem.setNumRepetition(ltmItem.getNumRepetition() - 1);

        numProcessed++;
        processStatusInfo(null, -1);
    }
}
| {
"content_hash": "5460abbfb99bdd2f524a41969067c00a",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 97,
"avg_line_length": 32.04615384615385,
"alnum_prop": 0.6668266922707633,
"repo_name": "Mebus/opencards",
"id": "c53b999c26d681aae95b24719b477762263cae64",
"size": "2083",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/info/opencards/learnstrats/ltm/RefreshProcessManager.java",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "526081"
},
{
"name": "Shell",
"bytes": "767"
}
],
"symlink_target": ""
} |
// Test bootstrap: expose chai's `expect` globally and register the
// chai-kerouac-handler assertion plugin.
const chai = require('chai');

global.expect = chai.expect;

chai.use(require('chai-kerouac-handler'));
| {
"content_hash": "42273c79808fe3703123fb70f0ea948b",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 42,
"avg_line_length": 20.4,
"alnum_prop": 0.7058823529411765,
"repo_name": "jaredhanson/kerouac-blog",
"id": "61100a0d2e6fc0f21b27ff992ee2f227aa6f81bc",
"size": "102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/bootstrap/node.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "77781"
},
{
"name": "Makefile",
"bytes": "444"
}
],
"symlink_target": ""
} |
package zms
import (
"encoding/json"
"fmt"
rdl "github.com/ardielle/ardielle-go/rdl"
)
var _ = rdl.Version
var _ = json.Marshal
var _ = fmt.Printf
//
// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache version
// 2.0 license. See LICENSE file for terms.
//
// Common name types used by several API definitions.
//
// SimpleName - A simple identifier, an element of a compound name.
//
type SimpleName string

//
// CompoundName - A compound name. Most names in this API are compound names.
//
type CompoundName string

//
// DomainName - A domain name is the general qualifier prefix, as its
// uniqueness is managed.
//
type DomainName string

//
// EntityName - An entity name is a short form of a resource name, including
// only the domain and entity.
//
type EntityName string

//
// ServiceName - A service name will generally be a unique subdomain.
//
type ServiceName string

//
// LocationName - A location name is not yet defined, but will be a dotted name
// like everything else.
//
type LocationName string

//
// ActionName - An action (operation) name.
//
type ActionName string

//
// ResourceName - A resource name. Note that the EntityName part is optional,
// that is, a domain name followed by a colon is a valid resource name.
//
type ResourceName string

//
// YBase64 - The Y-specific URL-safe Base64 variant.
//
type YBase64 string

//
// YEncoded - YEncoded includes ybase64 chars, as well as = and %. This can
// represent a user cookie and URL-encoded values.
//
type YEncoded string

//
// AuthorityName - Used as the prefix in a signed assertion. This uniquely
// identifies a signing authority.
//
type AuthorityName string

//
// SignedToken - A signed assertion of identity, i.e. the user cookie value.
// This token will only make sense to the authority that generated it, so it is
// beneficial to have something in the value that is cheaply recognized to
// quickly reject if it belongs to another authority. In addition to the
// YEncoded set, our token includes ; to separate components, , to separate
// roles, and : for IPv6 addresses.
//
type SignedToken string

//
// MemberName - Role Member name - could be one of three values: *,
// DomainName.* or ServiceName[*]
//
type MemberName string
//
// Domain - A domain is an independent partition of users, roles, and
// resources. Its name represents the definition of a namespace; the only way a
// new namespace can be created, from the top, is by creating Domains.
// Administration of a domain is governed by the parent domain (using
// reverse-DNS namespaces). The top level domains are governed by the special
// "sys.auth" domain. Defaults for the optional boolean fields are applied by
// Init (Enabled=true, AuditEnabled=false).
//
type Domain struct {

    //
    // the common name to be referred to, the symbolic id. It is immutable
    //
    Name DomainName `json:"name"`

    //
    // the last modification timestamp of any object or attribute in this domain
    //
    Modified *rdl.Timestamp `json:"modified,omitempty" rdl:"optional"`

    //
    // unique identifier of the domain. generated on create, never reused
    //
    Id *rdl.UUID `json:"id,omitempty" rdl:"optional"`

    //
    // description of the domain
    //
    Description string `json:"description,omitempty" rdl:"optional"`

    //
    // a reference to an Organization
    //
    Org ResourceName `json:"org,omitempty" rdl:"optional"`

    //
    // Future use only, currently not used
    //
    Enabled *bool `json:"enabled,omitempty" rdl:"optional"`

    //
    // Flag indicates whether or not domain modifications should be logged for
    // SOX+Auditing. If true, the auditRef parameter must be supplied(not empty) for
    // any API defining it.
    //
    AuditEnabled *bool `json:"auditEnabled,omitempty" rdl:"optional"`

    //
    // associated cloud (i.e. aws) account id
    //
    Account string `json:"account,omitempty" rdl:"optional"`

    //
    // associated product id
    //
    YpmId *int32 `json:"ypmId,omitempty" rdl:"optional"`

    //
    // associated application id
    //
    ApplicationId string `json:"applicationId,omitempty" rdl:"optional"`
}
//
// NewDomain - creates an initialized Domain instance, returns a pointer to it
//
func NewDomain(init ...*Domain) *Domain {
    if len(init) == 1 {
        return init[0].Init()
    }
    return new(Domain).Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *Domain) Init() *Domain {
    if self.Enabled == nil {
        enabled := true
        self.Enabled = &enabled
    }
    if self.AuditEnabled == nil {
        auditEnabled := false
        self.AuditEnabled = &auditEnabled
    }
    return self
}
type rawDomain Domain

//
// UnmarshalJSON is defined for proper JSON decoding of a Domain
//
func (self *Domain) UnmarshalJSON(b []byte) error {
    var m rawDomain
    if err := json.Unmarshal(b, &m); err != nil {
        return err
    }
    o := Domain(m)
    *self = *((&o).Init())
    return self.Validate()
}
//
// Validate - checks for missing required fields, etc
//
func (self *Domain) Validate() error {
    if self.Name == "" {
        return fmt.Errorf("Domain.name is missing but is a required field")
    }
    if v := rdl.Validate(ZMSSchema(), "DomainName", self.Name); !v.Valid {
        return fmt.Errorf("Domain.name does not contain a valid DomainName (%v)", v.Error)
    }
    return nil
}
//
// RoleList - The representation for an enumeration of roles in the namespace,
// with pagination. Init ensures Names is a non-nil (possibly empty) slice.
//
type RoleList struct {

    //
    // list of role names
    //
    Names []EntityName `json:"names"`

    //
    // if the response is a paginated list, this attribute specifies the value to
    // be used in the next role list request as the value for the skip query
    // parameter.
    //
    Next string `json:"next,omitempty" rdl:"optional"`
}
//
// NewRoleList - creates an initialized RoleList instance, returns a pointer to it
//
func NewRoleList(init ...*RoleList) *RoleList {
    if len(init) == 1 {
        return init[0].Init()
    }
    return new(RoleList).Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *RoleList) Init() *RoleList {
    if self.Names == nil {
        self.Names = []EntityName{}
    }
    return self
}
type rawRoleList RoleList

//
// UnmarshalJSON is defined for proper JSON decoding of a RoleList
//
func (self *RoleList) UnmarshalJSON(b []byte) error {
    var m rawRoleList
    if err := json.Unmarshal(b, &m); err != nil {
        return err
    }
    o := RoleList(m)
    *self = *((&o).Init())
    return self.Validate()
}
//
// Validate - checks for missing required fields, etc
//
func (self *RoleList) Validate() error {
    var err error
    if self.Names == nil {
        err = fmt.Errorf("RoleList: Missing required field: names")
    }
    return err
}
//
// RoleAuditLog - An audit log entry for a role membership change.
//
type RoleAuditLog struct {

    //
    // name of the role member
    //
    Member MemberName `json:"member"`

    //
    // name of the principal executing the change
    //
    Admin ResourceName `json:"admin"`

    //
    // timestamp of the entry
    //
    Created rdl.Timestamp `json:"created"`

    //
    // log action - either add or delete
    //
    Action string `json:"action"`

    //
    // audit reference string for the change as supplied by admin
    //
    AuditRef string `json:"auditRef,omitempty" rdl:"optional"`
}
//
// NewRoleAuditLog - creates an initialized RoleAuditLog instance, returns a pointer to it
//
func NewRoleAuditLog(init ...*RoleAuditLog) *RoleAuditLog {
    if len(init) == 1 {
        return init[0]
    }
    return new(RoleAuditLog)
}
type rawRoleAuditLog RoleAuditLog

//
// UnmarshalJSON is defined for proper JSON decoding of a RoleAuditLog
//
func (self *RoleAuditLog) UnmarshalJSON(b []byte) error {
    var m rawRoleAuditLog
    if err := json.Unmarshal(b, &m); err != nil {
        return err
    }
    *self = RoleAuditLog(m)
    return self.Validate()
}
//
// Validate - checks for missing required fields, etc
//
func (self *RoleAuditLog) Validate() error {
    if self.Member == "" {
        return fmt.Errorf("RoleAuditLog.member is missing but is a required field")
    }
    if v := rdl.Validate(ZMSSchema(), "MemberName", self.Member); !v.Valid {
        return fmt.Errorf("RoleAuditLog.member does not contain a valid MemberName (%v)", v.Error)
    }
    if self.Admin == "" {
        return fmt.Errorf("RoleAuditLog.admin is missing but is a required field")
    }
    if v := rdl.Validate(ZMSSchema(), "ResourceName", self.Admin); !v.Valid {
        return fmt.Errorf("RoleAuditLog.admin does not contain a valid ResourceName (%v)", v.Error)
    }
    if self.Created.IsZero() {
        return fmt.Errorf("RoleAuditLog: Missing required field: created")
    }
    if self.Action == "" {
        return fmt.Errorf("RoleAuditLog.action is missing but is a required field")
    }
    if v := rdl.Validate(ZMSSchema(), "String", self.Action); !v.Valid {
        return fmt.Errorf("RoleAuditLog.action does not contain a valid String (%v)", v.Error)
    }
    return nil
}
//
// RoleMember - a role member together with an optional expiration.
//
type RoleMember struct {

    //
    // name of the member
    //
    MemberName MemberName `json:"memberName"`

    //
    // the expiration timestamp
    //
    Expiration *rdl.Timestamp `json:"expiration,omitempty" rdl:"optional"`
}
//
// NewRoleMember - creates an initialized RoleMember instance, returns a pointer to it
//
func NewRoleMember(init ...*RoleMember) *RoleMember {
    if len(init) == 1 {
        return init[0]
    }
    return new(RoleMember)
}
type rawRoleMember RoleMember

//
// UnmarshalJSON is defined for proper JSON decoding of a RoleMember
//
func (self *RoleMember) UnmarshalJSON(b []byte) error {
    var m rawRoleMember
    if err := json.Unmarshal(b, &m); err != nil {
        return err
    }
    *self = RoleMember(m)
    return self.Validate()
}
//
// Validate - checks for missing required fields, etc
//
func (self *RoleMember) Validate() error {
    if self.MemberName == "" {
        return fmt.Errorf("RoleMember.memberName is missing but is a required field")
    }
    if v := rdl.Validate(ZMSSchema(), "MemberName", self.MemberName); !v.Valid {
        return fmt.Errorf("RoleMember.memberName does not contain a valid MemberName (%v)", v.Error)
    }
    return nil
}
//
// Role - The representation for a Role with its set of members. Either an
// explicit member list or a trust domain may be present.
//
type Role struct {

    //
    // name of the role
    //
    Name ResourceName `json:"name"`

    //
    // last modification timestamp of the role
    //
    Modified *rdl.Timestamp `json:"modified,omitempty" rdl:"optional"`

    //
    // an explicit list of members. Might be empty or null, if trust is set
    //
    Members []MemberName `json:"members,omitempty" rdl:"optional"`

    //
    // members with expiration
    //
    RoleMembers []*RoleMember `json:"roleMembers,omitempty" rdl:"optional"`

    //
    // a trusted domain to delegate membership decisions to
    //
    Trust DomainName `json:"trust,omitempty" rdl:"optional"`

    //
    // an audit log for role membership changes
    //
    AuditLog []*RoleAuditLog `json:"auditLog,omitempty" rdl:"optional"`
}
//
// NewRole - creates an initialized Role instance, returns a pointer to it
//
func NewRole(init ...*Role) *Role {
    if len(init) == 1 {
        return init[0]
    }
    return new(Role)
}
type rawRole Role

//
// UnmarshalJSON is defined for proper JSON decoding of a Role
//
func (self *Role) UnmarshalJSON(b []byte) error {
    var m rawRole
    if err := json.Unmarshal(b, &m); err != nil {
        return err
    }
    *self = Role(m)
    return self.Validate()
}
//
// Validate - checks for missing required fields, etc
//
func (self *Role) Validate() error {
    if self.Name == "" {
        return fmt.Errorf("Role.name is missing but is a required field")
    }
    if v := rdl.Validate(ZMSSchema(), "ResourceName", self.Name); !v.Valid {
        return fmt.Errorf("Role.name does not contain a valid ResourceName (%v)", v.Error)
    }
    return nil
}
//
// Roles - The representation for a list of roles with full details. Init
// ensures List is a non-nil (possibly empty) slice.
//
type Roles struct {

    //
    // list of role objects
    //
    List []*Role `json:"list"`
}
//
// NewRoles - creates an initialized Roles instance, returns a pointer to it
//
func NewRoles(init ...*Roles) *Roles {
    if len(init) == 1 {
        return init[0].Init()
    }
    return new(Roles).Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *Roles) Init() *Roles {
    if self.List == nil {
        self.List = []*Role{}
    }
    return self
}
type rawRoles Roles

//
// UnmarshalJSON is defined for proper JSON decoding of a Roles
//
func (self *Roles) UnmarshalJSON(b []byte) error {
    var m rawRoles
    if err := json.Unmarshal(b, &m); err != nil {
        return err
    }
    o := Roles(m)
    *self = *((&o).Init())
    return self.Validate()
}
//
// Validate - checks for missing required fields, etc
//
func (self *Roles) Validate() error {
    var err error
    if self.List == nil {
        err = fmt.Errorf("Roles: Missing required field: list")
    }
    return err
}
//
// Membership - The representation for a role membership. Init defaults
// IsMember to true when it is unset.
//
type Membership struct {

    //
    // name of the member
    //
    MemberName MemberName `json:"memberName"`

    //
    // flag to indicate whether or the user is a member or not
    //
    IsMember *bool `json:"isMember,omitempty" rdl:"optional"`

    //
    // name of the role
    //
    RoleName ResourceName `json:"roleName,omitempty" rdl:"optional"`

    //
    // the expiration timestamp
    //
    Expiration *rdl.Timestamp `json:"expiration,omitempty" rdl:"optional"`
}
//
// NewMembership - creates an initialized Membership instance, returns a pointer to it
//
func NewMembership(init ...*Membership) *Membership {
    if len(init) == 1 {
        return init[0].Init()
    }
    return new(Membership).Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *Membership) Init() *Membership {
    if self.IsMember == nil {
        isMember := true
        self.IsMember = &isMember
    }
    return self
}
type rawMembership Membership

//
// UnmarshalJSON is defined for proper JSON decoding of a Membership
//
func (self *Membership) UnmarshalJSON(b []byte) error {
    var m rawMembership
    if err := json.Unmarshal(b, &m); err != nil {
        return err
    }
    o := Membership(m)
    *self = *((&o).Init())
    return self.Validate()
}
//
// Validate - checks for missing required fields, etc
//
func (self *Membership) Validate() error {
if self.MemberName == "" {
return fmt.Errorf("Membership.memberName is missing but is a required field")
} else {
val := rdl.Validate(ZMSSchema(), "MemberName", self.MemberName)
if !val.Valid {
return fmt.Errorf("Membership.memberName does not contain a valid MemberName (%v)", val.Error)
}
}
return nil
}
// DefaultAdmins - The list of domain administrators.
type DefaultAdmins struct {
	// list of domain administrators
	Admins []ResourceName `json:"admins"`
}

// NewDefaultAdmins builds a DefaultAdmins value. An optional single
// *DefaultAdmins argument seeds the instance; defaults are applied via Init.
func NewDefaultAdmins(init ...*DefaultAdmins) *DefaultAdmins {
	d := new(DefaultAdmins)
	if len(init) == 1 {
		d = init[0]
	}
	return d.Init()
}

// Init applies default field values: a nil Admins becomes an empty slice.
func (d *DefaultAdmins) Init() *DefaultAdmins {
	if d.Admins == nil {
		d.Admins = []ResourceName{}
	}
	return d
}

// rawDefaultAdmins mirrors DefaultAdmins to avoid UnmarshalJSON recursion.
type rawDefaultAdmins DefaultAdmins

// UnmarshalJSON decodes a DefaultAdmins from JSON, applies defaults, and validates.
func (d *DefaultAdmins) UnmarshalJSON(b []byte) error {
	var raw rawDefaultAdmins
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := DefaultAdmins(raw)
	*d = *decoded.Init()
	return d.Validate()
}

// Validate reports an error when required fields are missing.
func (d *DefaultAdmins) Validate() error {
	if d.Admins != nil {
		return nil
	}
	return fmt.Errorf("DefaultAdmins: Missing required field: admins")
}
// AssertionEffect - Every assertion can have the effect of ALLOW or DENY.
type AssertionEffect int

// AssertionEffect constants. The zero value is an unused placeholder so
// that valid effects start at 1.
const (
	_ AssertionEffect = iota
	ALLOW
	DENY
)

// namesAssertionEffect maps each AssertionEffect value to its symbol.
// Index 0 is the empty-string placeholder for the unused zero value.
var namesAssertionEffect = []string{
	ALLOW: "ALLOW",
	DENY:  "DENY",
}

// NewAssertionEffect builds an AssertionEffect from an optional init value,
// which may be an AssertionEffect, int, int32, or symbol string. With no
// argument, or an unrecognized string, the first enum value (0) is returned.
// Any other init type panics.
func NewAssertionEffect(init ...interface{}) AssertionEffect {
	if len(init) != 1 {
		return AssertionEffect(0) // default to the first enum value
	}
	switch v := init[0].(type) {
	case AssertionEffect:
		return v
	case int:
		return AssertionEffect(v)
	case int32:
		return AssertionEffect(v)
	case string:
		for i, name := range namesAssertionEffect {
			if name == v {
				return AssertionEffect(i)
			}
		}
		return AssertionEffect(0)
	default:
		panic("Bad init value for AssertionEffect enum")
	}
}

// String returns the symbol for the enum value.
func (e AssertionEffect) String() string {
	return namesAssertionEffect[e]
}

// SymbolSet returns the full name slice for the enum (including the
// index-0 placeholder, so symbols align with enum values).
func (e AssertionEffect) SymbolSet() []string {
	return namesAssertionEffect
}

// MarshalJSON encodes an AssertionEffect as its JSON symbol string.
func (e AssertionEffect) MarshalJSON() ([]byte, error) {
	return json.Marshal(e.String())
}

// UnmarshalJSON decodes an AssertionEffect from its JSON symbol string.
func (e *AssertionEffect) UnmarshalJSON(b []byte) error {
	var symbol string
	if err := json.Unmarshal(b, &symbol); err != nil {
		return err
	}
	for i, name := range namesAssertionEffect {
		if name == symbol {
			*e = AssertionEffect(i)
			return nil
		}
	}
	return fmt.Errorf("Bad enum symbol for type AssertionEffect: %s", symbol)
}
// Assertion - A representation for the encapsulation of an action to be
// performed on a resource by a principal.
type Assertion struct {
	// the subject of the assertion - a role
	Role string `json:"role"`
	// the object of the assertion. Must be in the local namespace. Can contain
	// wildcards
	Resource string `json:"resource"`
	// the predicate of the assertion. Can contain wildcards
	Action string `json:"action"`
	// the effect of the assertion in the policy language
	Effect *AssertionEffect `json:"effect,omitempty" rdl:"optional"`
	// assertion id - auto generated by server. Not required during put
	// operations.
	Id *int64 `json:"id,omitempty" rdl:"optional"`
}

// NewAssertion builds an Assertion value. An optional single *Assertion
// argument seeds the instance. There are no field defaults to apply.
func NewAssertion(init ...*Assertion) *Assertion {
	a := new(Assertion)
	if len(init) == 1 {
		a = init[0]
	}
	return a
}

// rawAssertion mirrors Assertion to avoid UnmarshalJSON recursion.
type rawAssertion Assertion

// UnmarshalJSON decodes an Assertion from JSON and validates it.
func (a *Assertion) UnmarshalJSON(b []byte) error {
	var raw rawAssertion
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*a = Assertion(raw)
	return a.Validate()
}

// Validate reports an error when any of the required role, resource, or
// action fields is missing or malformed.
func (a *Assertion) Validate() error {
	if a.Role == "" {
		return fmt.Errorf("Assertion.role is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "String", a.Role); !res.Valid {
		return fmt.Errorf("Assertion.role does not contain a valid String (%v)", res.Error)
	}
	if a.Resource == "" {
		return fmt.Errorf("Assertion.resource is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "String", a.Resource); !res.Valid {
		return fmt.Errorf("Assertion.resource does not contain a valid String (%v)", res.Error)
	}
	if a.Action == "" {
		return fmt.Errorf("Assertion.action is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "String", a.Action); !res.Valid {
		return fmt.Errorf("Assertion.action does not contain a valid String (%v)", res.Error)
	}
	return nil
}
// Policy - The representation for a Policy with set of assertions.
type Policy struct {
	// name of the policy
	Name ResourceName `json:"name"`
	// last modification timestamp of this policy
	Modified *rdl.Timestamp `json:"modified,omitempty" rdl:"optional"`
	// list of defined assertions for this policy
	Assertions []*Assertion `json:"assertions"`
}

// NewPolicy builds a Policy value. An optional single *Policy argument
// seeds the instance; defaults are applied via Init.
func NewPolicy(init ...*Policy) *Policy {
	p := new(Policy)
	if len(init) == 1 {
		p = init[0]
	}
	return p.Init()
}

// Init applies default field values: a nil Assertions becomes an empty slice.
func (p *Policy) Init() *Policy {
	if p.Assertions == nil {
		p.Assertions = []*Assertion{}
	}
	return p
}

// rawPolicy mirrors Policy to avoid UnmarshalJSON recursion.
type rawPolicy Policy

// UnmarshalJSON decodes a Policy from JSON, applies defaults, and validates.
func (p *Policy) UnmarshalJSON(b []byte) error {
	var raw rawPolicy
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := Policy(raw)
	*p = *decoded.Init()
	return p.Validate()
}

// Validate reports an error when required fields are missing or malformed.
func (p *Policy) Validate() error {
	if p.Name == "" {
		return fmt.Errorf("Policy.name is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "ResourceName", p.Name); !res.Valid {
		return fmt.Errorf("Policy.name does not contain a valid ResourceName (%v)", res.Error)
	}
	if p.Assertions == nil {
		return fmt.Errorf("Policy: Missing required field: assertions")
	}
	return nil
}
// Policies - The representation of list of policy objects
type Policies struct {
	// list of policy objects
	List []*Policy `json:"list"`
}

// NewPolicies builds a Policies value. An optional single *Policies
// argument seeds the instance; defaults are applied via Init.
func NewPolicies(init ...*Policies) *Policies {
	p := new(Policies)
	if len(init) == 1 {
		p = init[0]
	}
	return p.Init()
}

// Init applies default field values: a nil List becomes an empty slice.
func (p *Policies) Init() *Policies {
	if p.List == nil {
		p.List = []*Policy{}
	}
	return p
}

// rawPolicies mirrors Policies to avoid UnmarshalJSON recursion.
type rawPolicies Policies

// UnmarshalJSON decodes a Policies from JSON, applies defaults, and validates.
func (p *Policies) UnmarshalJSON(b []byte) error {
	var raw rawPolicies
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := Policies(raw)
	*p = *decoded.Init()
	return p.Validate()
}

// Validate reports an error when required fields are missing.
func (p *Policies) Validate() error {
	if p.List != nil {
		return nil
	}
	return fmt.Errorf("Policies: Missing required field: list")
}
// PublicKeyEntry - The representation of the public key in a service identity
// object.
type PublicKeyEntry struct {
	// the public key for the service
	Key string `json:"key"`
	// the key identifier (version or zone name)
	Id string `json:"id"`
}

// NewPublicKeyEntry builds a PublicKeyEntry value. An optional single
// *PublicKeyEntry argument seeds the instance. There are no field defaults.
func NewPublicKeyEntry(init ...*PublicKeyEntry) *PublicKeyEntry {
	p := new(PublicKeyEntry)
	if len(init) == 1 {
		p = init[0]
	}
	return p
}

// rawPublicKeyEntry mirrors PublicKeyEntry to avoid UnmarshalJSON recursion.
type rawPublicKeyEntry PublicKeyEntry

// UnmarshalJSON decodes a PublicKeyEntry from JSON and validates it.
func (p *PublicKeyEntry) UnmarshalJSON(b []byte) error {
	var raw rawPublicKeyEntry
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*p = PublicKeyEntry(raw)
	return p.Validate()
}

// Validate reports an error when the required key or id field is missing
// or malformed.
func (p *PublicKeyEntry) Validate() error {
	if p.Key == "" {
		return fmt.Errorf("PublicKeyEntry.key is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "String", p.Key); !res.Valid {
		return fmt.Errorf("PublicKeyEntry.key does not contain a valid String (%v)", res.Error)
	}
	if p.Id == "" {
		return fmt.Errorf("PublicKeyEntry.id is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "String", p.Id); !res.Valid {
		return fmt.Errorf("PublicKeyEntry.id does not contain a valid String (%v)", res.Error)
	}
	return nil
}
// ServiceIdentity - The representation of the service identity object.
type ServiceIdentity struct {
	// the full name of the service, i.e. "sports.storage"
	Name ServiceName `json:"name"`
	// description of the service
	Description string `json:"description,omitempty" rdl:"optional"`
	// array of public keys for key rotation
	PublicKeys []*PublicKeyEntry `json:"publicKeys,omitempty" rdl:"optional"`
	// if present, then this service can provision tenants via this endpoint.
	ProviderEndpoint string `json:"providerEndpoint,omitempty" rdl:"optional"`
	// the timestamp when this entry was last modified
	Modified *rdl.Timestamp `json:"modified,omitempty" rdl:"optional"`
	// the path of the executable that runs the service
	Executable string `json:"executable,omitempty" rdl:"optional"`
	// list of host names that this service can run on
	Hosts []string `json:"hosts,omitempty" rdl:"optional"`
	// local (unix) user name this service can run as
	User string `json:"user,omitempty" rdl:"optional"`
	// local (unix) group name this service can run as
	Group string `json:"group,omitempty" rdl:"optional"`
}

// NewServiceIdentity builds a ServiceIdentity value. An optional single
// *ServiceIdentity argument seeds the instance. There are no field defaults.
func NewServiceIdentity(init ...*ServiceIdentity) *ServiceIdentity {
	s := new(ServiceIdentity)
	if len(init) == 1 {
		s = init[0]
	}
	return s
}

// rawServiceIdentity mirrors ServiceIdentity to avoid UnmarshalJSON recursion.
type rawServiceIdentity ServiceIdentity

// UnmarshalJSON decodes a ServiceIdentity from JSON and validates it.
func (s *ServiceIdentity) UnmarshalJSON(b []byte) error {
	var raw rawServiceIdentity
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*s = ServiceIdentity(raw)
	return s.Validate()
}

// Validate reports an error when the required name field is missing or
// malformed; all other fields are optional.
func (s *ServiceIdentity) Validate() error {
	if s.Name == "" {
		return fmt.Errorf("ServiceIdentity.name is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "ServiceName", s.Name); !res.Valid {
		return fmt.Errorf("ServiceIdentity.name does not contain a valid ServiceName (%v)", res.Error)
	}
	return nil
}
// ServiceIdentities - The representation of list of services
type ServiceIdentities struct {
	// list of services
	List []*ServiceIdentity `json:"list"`
}

// NewServiceIdentities builds a ServiceIdentities value. An optional single
// *ServiceIdentities argument seeds the instance; defaults are applied via Init.
func NewServiceIdentities(init ...*ServiceIdentities) *ServiceIdentities {
	s := new(ServiceIdentities)
	if len(init) == 1 {
		s = init[0]
	}
	return s.Init()
}

// Init applies default field values: a nil List becomes an empty slice.
func (s *ServiceIdentities) Init() *ServiceIdentities {
	if s.List == nil {
		s.List = []*ServiceIdentity{}
	}
	return s
}

// rawServiceIdentities mirrors ServiceIdentities to avoid UnmarshalJSON recursion.
type rawServiceIdentities ServiceIdentities

// UnmarshalJSON decodes a ServiceIdentities from JSON, applies defaults,
// and validates.
func (s *ServiceIdentities) UnmarshalJSON(b []byte) error {
	var raw rawServiceIdentities
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := ServiceIdentities(raw)
	*s = *decoded.Init()
	return s.Validate()
}

// Validate reports an error when required fields are missing.
func (s *ServiceIdentities) Validate() error {
	if s.List != nil {
		return nil
	}
	return fmt.Errorf("ServiceIdentities: Missing required field: list")
}
// ServiceIdentityList - The representation for an enumeration of services in
// the namespace, with pagination.
type ServiceIdentityList struct {
	// list of service names
	Names []EntityName `json:"names"`
	// if the response is a paginated list, this attribute specifies the value to
	// be used in the next service list request as the value for the skip query
	// parameter.
	Next string `json:"next,omitempty" rdl:"optional"`
}

// NewServiceIdentityList builds a ServiceIdentityList value. An optional
// single *ServiceIdentityList argument seeds the instance; defaults are
// applied via Init.
func NewServiceIdentityList(init ...*ServiceIdentityList) *ServiceIdentityList {
	s := new(ServiceIdentityList)
	if len(init) == 1 {
		s = init[0]
	}
	return s.Init()
}

// Init applies default field values: a nil Names becomes an empty slice.
func (s *ServiceIdentityList) Init() *ServiceIdentityList {
	if s.Names == nil {
		s.Names = []EntityName{}
	}
	return s
}

// rawServiceIdentityList mirrors ServiceIdentityList to avoid UnmarshalJSON
// recursion.
type rawServiceIdentityList ServiceIdentityList

// UnmarshalJSON decodes a ServiceIdentityList from JSON, applies defaults,
// and validates.
func (s *ServiceIdentityList) UnmarshalJSON(b []byte) error {
	var raw rawServiceIdentityList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := ServiceIdentityList(raw)
	*s = *decoded.Init()
	return s.Validate()
}

// Validate reports an error when required fields are missing.
func (s *ServiceIdentityList) Validate() error {
	if s.Names != nil {
		return nil
	}
	return fmt.Errorf("ServiceIdentityList: Missing required field: names")
}
// Template - Solution Template object defined on the server
type Template struct {
	// list of roles in the template
	Roles []*Role `json:"roles"`
	// list of policies defined in this template
	Policies []*Policy `json:"policies"`
	// list of services defined in this template
	Services []*ServiceIdentity `json:"services,omitempty" rdl:"optional"`
}

// NewTemplate builds a Template value. An optional single *Template
// argument seeds the instance; defaults are applied via Init.
func NewTemplate(init ...*Template) *Template {
	t := new(Template)
	if len(init) == 1 {
		t = init[0]
	}
	return t.Init()
}

// Init applies default field values: nil Roles and Policies become empty
// slices (Services is optional and left as-is).
func (t *Template) Init() *Template {
	if t.Roles == nil {
		t.Roles = []*Role{}
	}
	if t.Policies == nil {
		t.Policies = []*Policy{}
	}
	return t
}

// rawTemplate mirrors Template to avoid UnmarshalJSON recursion.
type rawTemplate Template

// UnmarshalJSON decodes a Template from JSON, applies defaults, and validates.
func (t *Template) UnmarshalJSON(b []byte) error {
	var raw rawTemplate
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := Template(raw)
	*t = *decoded.Init()
	return t.Validate()
}

// Validate reports an error when required fields are missing.
func (t *Template) Validate() error {
	if t.Roles == nil {
		return fmt.Errorf("Template: Missing required field: roles")
	}
	if t.Policies == nil {
		return fmt.Errorf("Template: Missing required field: policies")
	}
	return nil
}
// TemplateList - List of template names that is the base struct for server and
// domain templates
type TemplateList struct {
	// list of template names
	TemplateNames []SimpleName `json:"templateNames"`
}

// NewTemplateList builds a TemplateList value. An optional single
// *TemplateList argument seeds the instance; defaults are applied via Init.
func NewTemplateList(init ...*TemplateList) *TemplateList {
	t := new(TemplateList)
	if len(init) == 1 {
		t = init[0]
	}
	return t.Init()
}

// Init applies default field values: a nil TemplateNames becomes an empty slice.
func (t *TemplateList) Init() *TemplateList {
	if t.TemplateNames == nil {
		t.TemplateNames = []SimpleName{}
	}
	return t
}

// rawTemplateList mirrors TemplateList to avoid UnmarshalJSON recursion.
type rawTemplateList TemplateList

// UnmarshalJSON decodes a TemplateList from JSON, applies defaults, and validates.
func (t *TemplateList) UnmarshalJSON(b []byte) error {
	var raw rawTemplateList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := TemplateList(raw)
	*t = *decoded.Init()
	return t.Validate()
}

// Validate reports an error when required fields are missing.
func (t *TemplateList) Validate() error {
	if t.TemplateNames != nil {
		return nil
	}
	return fmt.Errorf("TemplateList: Missing required field: templateNames")
}
// TemplateParam - a name/value pair used to parameterize a solution template.
type TemplateParam struct {
	// name of the parameter
	Name SimpleName `json:"name"`
	// value of the parameter
	Value CompoundName `json:"value"`
}

// NewTemplateParam builds a TemplateParam value. An optional single
// *TemplateParam argument seeds the instance. There are no field defaults.
func NewTemplateParam(init ...*TemplateParam) *TemplateParam {
	t := new(TemplateParam)
	if len(init) == 1 {
		t = init[0]
	}
	return t
}

// rawTemplateParam mirrors TemplateParam to avoid UnmarshalJSON recursion.
type rawTemplateParam TemplateParam

// UnmarshalJSON decodes a TemplateParam from JSON and validates it.
func (t *TemplateParam) UnmarshalJSON(b []byte) error {
	var raw rawTemplateParam
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*t = TemplateParam(raw)
	return t.Validate()
}

// Validate reports an error when the required name or value field is missing
// or malformed.
func (t *TemplateParam) Validate() error {
	if t.Name == "" {
		return fmt.Errorf("TemplateParam.name is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "SimpleName", t.Name); !res.Valid {
		return fmt.Errorf("TemplateParam.name does not contain a valid SimpleName (%v)", res.Error)
	}
	if t.Value == "" {
		return fmt.Errorf("TemplateParam.value is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "CompoundName", t.Value); !res.Valid {
		return fmt.Errorf("TemplateParam.value does not contain a valid CompoundName (%v)", res.Error)
	}
	return nil
}
// DomainTemplate - solution template(s) to be applied to a domain
type DomainTemplate struct {
	// list of template names
	TemplateNames []SimpleName `json:"templateNames"`
	// optional template parameters
	Params []*TemplateParam `json:"params,omitempty" rdl:"optional"`
}

// NewDomainTemplate builds a DomainTemplate value. An optional single
// *DomainTemplate argument seeds the instance; defaults are applied via Init.
func NewDomainTemplate(init ...*DomainTemplate) *DomainTemplate {
	d := new(DomainTemplate)
	if len(init) == 1 {
		d = init[0]
	}
	return d.Init()
}

// Init applies default field values: a nil TemplateNames becomes an empty slice.
func (d *DomainTemplate) Init() *DomainTemplate {
	if d.TemplateNames == nil {
		d.TemplateNames = []SimpleName{}
	}
	return d
}

// rawDomainTemplate mirrors DomainTemplate to avoid UnmarshalJSON recursion.
type rawDomainTemplate DomainTemplate

// UnmarshalJSON decodes a DomainTemplate from JSON, applies defaults, and validates.
func (d *DomainTemplate) UnmarshalJSON(b []byte) error {
	var raw rawDomainTemplate
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := DomainTemplate(raw)
	*d = *decoded.Init()
	return d.Validate()
}

// Validate reports an error when required fields are missing.
func (d *DomainTemplate) Validate() error {
	if d.TemplateNames != nil {
		return nil
	}
	return fmt.Errorf("DomainTemplate: Missing required field: templateNames")
}
// DomainTemplateList - List of solution templates to be applied to a domain
type DomainTemplateList struct {
	// list of template names
	TemplateNames []SimpleName `json:"templateNames"`
}

// NewDomainTemplateList builds a DomainTemplateList value. An optional single
// *DomainTemplateList argument seeds the instance; defaults are applied via Init.
func NewDomainTemplateList(init ...*DomainTemplateList) *DomainTemplateList {
	d := new(DomainTemplateList)
	if len(init) == 1 {
		d = init[0]
	}
	return d.Init()
}

// Init applies default field values: a nil TemplateNames becomes an empty slice.
func (d *DomainTemplateList) Init() *DomainTemplateList {
	if d.TemplateNames == nil {
		d.TemplateNames = []SimpleName{}
	}
	return d
}

// rawDomainTemplateList mirrors DomainTemplateList to avoid UnmarshalJSON recursion.
type rawDomainTemplateList DomainTemplateList

// UnmarshalJSON decodes a DomainTemplateList from JSON, applies defaults,
// and validates.
func (d *DomainTemplateList) UnmarshalJSON(b []byte) error {
	var raw rawDomainTemplateList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := DomainTemplateList(raw)
	*d = *decoded.Init()
	return d.Validate()
}

// Validate reports an error when required fields are missing.
func (d *DomainTemplateList) Validate() error {
	if d.TemplateNames != nil {
		return nil
	}
	return fmt.Errorf("DomainTemplateList: Missing required field: templateNames")
}
// ServerTemplateList - List of solution templates available in the server
type ServerTemplateList struct {
	// list of template names
	TemplateNames []SimpleName `json:"templateNames"`
}

// NewServerTemplateList builds a ServerTemplateList value. An optional single
// *ServerTemplateList argument seeds the instance; defaults are applied via Init.
func NewServerTemplateList(init ...*ServerTemplateList) *ServerTemplateList {
	s := new(ServerTemplateList)
	if len(init) == 1 {
		s = init[0]
	}
	return s.Init()
}

// Init applies default field values: a nil TemplateNames becomes an empty slice.
func (s *ServerTemplateList) Init() *ServerTemplateList {
	if s.TemplateNames == nil {
		s.TemplateNames = []SimpleName{}
	}
	return s
}

// rawServerTemplateList mirrors ServerTemplateList to avoid UnmarshalJSON recursion.
type rawServerTemplateList ServerTemplateList

// UnmarshalJSON decodes a ServerTemplateList from JSON, applies defaults,
// and validates.
func (s *ServerTemplateList) UnmarshalJSON(b []byte) error {
	var raw rawServerTemplateList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := ServerTemplateList(raw)
	*s = *decoded.Init()
	return s.Validate()
}

// Validate reports an error when required fields are missing.
func (s *ServerTemplateList) Validate() error {
	if s.TemplateNames != nil {
		return nil
	}
	return fmt.Errorf("ServerTemplateList: Missing required field: templateNames")
}
// DomainList - A paginated list of domains.
type DomainList struct {
	// list of domain names
	Names []DomainName `json:"names"`
	// if the response is a paginated list, this attribute specifies the value to
	// be used in the next domain list request as the value for the skip query
	// parameter.
	Next string `json:"next,omitempty" rdl:"optional"`
}

// NewDomainList builds a DomainList value. An optional single *DomainList
// argument seeds the instance; defaults are applied via Init.
func NewDomainList(init ...*DomainList) *DomainList {
	d := new(DomainList)
	if len(init) == 1 {
		d = init[0]
	}
	return d.Init()
}

// Init applies default field values: a nil Names becomes an empty slice.
func (d *DomainList) Init() *DomainList {
	if d.Names == nil {
		d.Names = []DomainName{}
	}
	return d
}

// rawDomainList mirrors DomainList to avoid UnmarshalJSON recursion.
type rawDomainList DomainList

// UnmarshalJSON decodes a DomainList from JSON, applies defaults, and validates.
func (d *DomainList) UnmarshalJSON(b []byte) error {
	var raw rawDomainList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := DomainList(raw)
	*d = *decoded.Init()
	return d.Validate()
}

// Validate reports an error when required fields are missing.
func (d *DomainList) Validate() error {
	if d.Names != nil {
		return nil
	}
	return fmt.Errorf("DomainList: Missing required field: names")
}
// DomainMeta - Set of metadata attributes that all domains may have and can be
// changed.
type DomainMeta struct {
	// a description of the domain
	Description string `json:"description,omitempty" rdl:"optional"`
	// a reference to an Organization. (i.e. org:media)
	Org ResourceName `json:"org,omitempty" rdl:"optional"`
	// Future use only, currently not used
	Enabled *bool `json:"enabled,omitempty" rdl:"optional"`
	// Flag indicates whether or not domain modifications should be logged for
	// SOX+Auditing. If true, the auditRef parameter must be supplied(not empty) for
	// any API defining it.
	AuditEnabled *bool `json:"auditEnabled,omitempty" rdl:"optional"`
	// associated cloud (i.e. aws) account id
	Account string `json:"account,omitempty" rdl:"optional"`
	// associated product id
	YpmId *int32 `json:"ypmId,omitempty" rdl:"optional"`
	// associated application id
	ApplicationId string `json:"applicationId,omitempty" rdl:"optional"`
}

// NewDomainMeta builds a DomainMeta value. An optional single *DomainMeta
// argument seeds the instance; defaults are applied via Init.
func NewDomainMeta(init ...*DomainMeta) *DomainMeta {
	m := new(DomainMeta)
	if len(init) == 1 {
		m = init[0]
	}
	return m.Init()
}

// Init applies default field values: Enabled defaults to true and
// AuditEnabled to false when unset.
func (m *DomainMeta) Init() *DomainMeta {
	if m.Enabled == nil {
		enabled := true
		m.Enabled = &enabled
	}
	if m.AuditEnabled == nil {
		auditEnabled := false
		m.AuditEnabled = &auditEnabled
	}
	return m
}

// rawDomainMeta mirrors DomainMeta to avoid UnmarshalJSON recursion.
type rawDomainMeta DomainMeta

// UnmarshalJSON decodes a DomainMeta from JSON, applies defaults, and validates.
func (m *DomainMeta) UnmarshalJSON(b []byte) error {
	var raw rawDomainMeta
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := DomainMeta(raw)
	*m = *decoded.Init()
	return m.Validate()
}

// Validate always succeeds: every DomainMeta field is optional.
func (m *DomainMeta) Validate() error {
	return nil
}
// TopLevelDomain - Top Level Domain object. The required attributes include
// the name of the domain and list of domain administrators.
type TopLevelDomain struct {
	// a description of the domain
	Description string `json:"description,omitempty" rdl:"optional"`
	// a reference to an Organization. (i.e. org:media)
	Org ResourceName `json:"org,omitempty" rdl:"optional"`
	// Future use only, currently not used
	Enabled *bool `json:"enabled,omitempty" rdl:"optional"`
	// Flag indicates whether or not domain modifications should be logged for
	// SOX+Auditing. If true, the auditRef parameter must be supplied(not empty) for
	// any API defining it.
	AuditEnabled *bool `json:"auditEnabled,omitempty" rdl:"optional"`
	// associated cloud (i.e. aws) account id
	Account string `json:"account,omitempty" rdl:"optional"`
	// associated product id
	YpmId *int32 `json:"ypmId,omitempty" rdl:"optional"`
	// associated application id
	ApplicationId string `json:"applicationId,omitempty" rdl:"optional"`
	// name of the domain
	Name SimpleName `json:"name"`
	// list of domain administrators
	AdminUsers []ResourceName `json:"adminUsers"`
	// list of solution template names
	Templates *DomainTemplateList `json:"templates,omitempty" rdl:"optional"`
}

// NewTopLevelDomain builds a TopLevelDomain value. An optional single
// *TopLevelDomain argument seeds the instance; defaults are applied via Init.
func NewTopLevelDomain(init ...*TopLevelDomain) *TopLevelDomain {
	t := new(TopLevelDomain)
	if len(init) == 1 {
		t = init[0]
	}
	return t.Init()
}

// Init applies default field values: Enabled defaults to true, AuditEnabled
// to false, and a nil AdminUsers becomes an empty slice.
func (t *TopLevelDomain) Init() *TopLevelDomain {
	if t.Enabled == nil {
		enabled := true
		t.Enabled = &enabled
	}
	if t.AuditEnabled == nil {
		auditEnabled := false
		t.AuditEnabled = &auditEnabled
	}
	if t.AdminUsers == nil {
		t.AdminUsers = []ResourceName{}
	}
	return t
}

// rawTopLevelDomain mirrors TopLevelDomain to avoid UnmarshalJSON recursion.
type rawTopLevelDomain TopLevelDomain

// UnmarshalJSON decodes a TopLevelDomain from JSON, applies defaults, and validates.
func (t *TopLevelDomain) UnmarshalJSON(b []byte) error {
	var raw rawTopLevelDomain
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := TopLevelDomain(raw)
	*t = *decoded.Init()
	return t.Validate()
}

// Validate reports an error when the required name or adminUsers field is
// missing or malformed.
func (t *TopLevelDomain) Validate() error {
	if t.Name == "" {
		return fmt.Errorf("TopLevelDomain.name is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "SimpleName", t.Name); !res.Valid {
		return fmt.Errorf("TopLevelDomain.name does not contain a valid SimpleName (%v)", res.Error)
	}
	if t.AdminUsers == nil {
		return fmt.Errorf("TopLevelDomain: Missing required field: adminUsers")
	}
	return nil
}
// SubDomain - A Subdomain is a TopLevelDomain, except it has a parent.
type SubDomain struct {
	// a description of the domain
	Description string `json:"description,omitempty" rdl:"optional"`
	// a reference to an Organization. (i.e. org:media)
	Org ResourceName `json:"org,omitempty" rdl:"optional"`
	// Future use only, currently not used
	Enabled *bool `json:"enabled,omitempty" rdl:"optional"`
	// Flag indicates whether or not domain modifications should be logged for
	// SOX+Auditing. If true, the auditRef parameter must be supplied(not empty) for
	// any API defining it.
	AuditEnabled *bool `json:"auditEnabled,omitempty" rdl:"optional"`
	// associated cloud (i.e. aws) account id
	Account string `json:"account,omitempty" rdl:"optional"`
	// associated product id
	YpmId *int32 `json:"ypmId,omitempty" rdl:"optional"`
	// associated application id
	ApplicationId string `json:"applicationId,omitempty" rdl:"optional"`
	// name of the domain
	Name SimpleName `json:"name"`
	// list of domain administrators
	AdminUsers []ResourceName `json:"adminUsers"`
	// list of solution template names
	Templates *DomainTemplateList `json:"templates,omitempty" rdl:"optional"`
	// name of the parent domain
	Parent DomainName `json:"parent"`
}

// NewSubDomain builds a SubDomain value. An optional single *SubDomain
// argument seeds the instance; defaults are applied via Init.
func NewSubDomain(init ...*SubDomain) *SubDomain {
	s := new(SubDomain)
	if len(init) == 1 {
		s = init[0]
	}
	return s.Init()
}

// Init applies default field values: Enabled defaults to true, AuditEnabled
// to false, and a nil AdminUsers becomes an empty slice.
func (s *SubDomain) Init() *SubDomain {
	if s.Enabled == nil {
		enabled := true
		s.Enabled = &enabled
	}
	if s.AuditEnabled == nil {
		auditEnabled := false
		s.AuditEnabled = &auditEnabled
	}
	if s.AdminUsers == nil {
		s.AdminUsers = []ResourceName{}
	}
	return s
}

// rawSubDomain mirrors SubDomain to avoid UnmarshalJSON recursion.
type rawSubDomain SubDomain

// UnmarshalJSON decodes a SubDomain from JSON, applies defaults, and validates.
func (s *SubDomain) UnmarshalJSON(b []byte) error {
	var raw rawSubDomain
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := SubDomain(raw)
	*s = *decoded.Init()
	return s.Validate()
}

// Validate reports an error when the required name, adminUsers, or parent
// field is missing or malformed.
func (s *SubDomain) Validate() error {
	if s.Name == "" {
		return fmt.Errorf("SubDomain.name is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "SimpleName", s.Name); !res.Valid {
		return fmt.Errorf("SubDomain.name does not contain a valid SimpleName (%v)", res.Error)
	}
	if s.AdminUsers == nil {
		return fmt.Errorf("SubDomain: Missing required field: adminUsers")
	}
	if s.Parent == "" {
		return fmt.Errorf("SubDomain.parent is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "DomainName", s.Parent); !res.Valid {
		return fmt.Errorf("SubDomain.parent does not contain a valid DomainName (%v)", res.Error)
	}
	return nil
}
// UserDomain - A UserDomain is the user's own top level domain in user - e.g.
// user.hga
type UserDomain struct {
	// a description of the domain
	Description string `json:"description,omitempty" rdl:"optional"`
	// a reference to an Organization. (i.e. org:media)
	Org ResourceName `json:"org,omitempty" rdl:"optional"`
	// Future use only, currently not used
	Enabled *bool `json:"enabled,omitempty" rdl:"optional"`
	// Flag indicates whether or not domain modifications should be logged for
	// SOX+Auditing. If true, the auditRef parameter must be supplied(not empty) for
	// any API defining it.
	AuditEnabled *bool `json:"auditEnabled,omitempty" rdl:"optional"`
	// associated cloud (i.e. aws) account id
	Account string `json:"account,omitempty" rdl:"optional"`
	// associated product id
	YpmId *int32 `json:"ypmId,omitempty" rdl:"optional"`
	// associated application id
	ApplicationId string `json:"applicationId,omitempty" rdl:"optional"`
	// user id which will be the domain name
	Name SimpleName `json:"name"`
	// list of solution template names
	Templates *DomainTemplateList `json:"templates,omitempty" rdl:"optional"`
}

// NewUserDomain builds a UserDomain value. An optional single *UserDomain
// argument seeds the instance; defaults are applied via Init.
func NewUserDomain(init ...*UserDomain) *UserDomain {
	u := new(UserDomain)
	if len(init) == 1 {
		u = init[0]
	}
	return u.Init()
}

// Init applies default field values: Enabled defaults to true and
// AuditEnabled to false when unset.
func (u *UserDomain) Init() *UserDomain {
	if u.Enabled == nil {
		enabled := true
		u.Enabled = &enabled
	}
	if u.AuditEnabled == nil {
		auditEnabled := false
		u.AuditEnabled = &auditEnabled
	}
	return u
}

// rawUserDomain mirrors UserDomain to avoid UnmarshalJSON recursion.
type rawUserDomain UserDomain

// UnmarshalJSON decodes a UserDomain from JSON, applies defaults, and validates.
func (u *UserDomain) UnmarshalJSON(b []byte) error {
	var raw rawUserDomain
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := UserDomain(raw)
	*u = *decoded.Init()
	return u.Validate()
}

// Validate reports an error when the required name field is missing or malformed.
func (u *UserDomain) Validate() error {
	if u.Name == "" {
		return fmt.Errorf("UserDomain.name is missing but is a required field")
	}
	if res := rdl.Validate(ZMSSchema(), "SimpleName", u.Name); !res.Valid {
		return fmt.Errorf("UserDomain.name does not contain a valid SimpleName (%v)", res.Error)
	}
	return nil
}
//
// DanglingPolicy - A dangling policy where the assertion is referencing a role
// name that doesn't exist in the domain
//
type DanglingPolicy struct {
	PolicyName EntityName `json:"policyName"`
	RoleName EntityName `json:"roleName"`
}

//
// NewDanglingPolicy - creates an initialized DanglingPolicy instance, returns a pointer to it
//
func NewDanglingPolicy(init ...*DanglingPolicy) *DanglingPolicy {
	if len(init) == 1 {
		return init[0]
	}
	return new(DanglingPolicy)
}

type rawDanglingPolicy DanglingPolicy

//
// UnmarshalJSON is defined for proper JSON decoding of a DanglingPolicy
//
func (self *DanglingPolicy) UnmarshalJSON(b []byte) error {
	var raw rawDanglingPolicy
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*self = DanglingPolicy(raw)
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *DanglingPolicy) Validate() error {
	if self.PolicyName == "" {
		return fmt.Errorf("DanglingPolicy.policyName is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "EntityName", self.PolicyName); !val.Valid {
		return fmt.Errorf("DanglingPolicy.policyName does not contain a valid EntityName (%v)", val.Error)
	}
	if self.RoleName == "" {
		return fmt.Errorf("DanglingPolicy.roleName is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "EntityName", self.RoleName); !val.Valid {
		return fmt.Errorf("DanglingPolicy.roleName does not contain a valid EntityName (%v)", val.Error)
	}
	return nil
}
//
// DomainDataCheck - Domain data object representing the results of a check
// operation looking for dangling roles, policies and trust relationships that
// are set either on tenant or provider side only
//
type DomainDataCheck struct {
	//
	// Names of roles not specified in any assertion. Might be empty or null if no
	// dangling roles.
	//
	DanglingRoles []EntityName `json:"danglingRoles,omitempty" rdl:"optional"`
	//
	// Policy+role tuples where role doesnt exist. Might be empty or null if no
	// dangling policies.
	//
	DanglingPolicies []*DanglingPolicy `json:"danglingPolicies,omitempty" rdl:"optional"`
	//
	// total number of policies
	//
	PolicyCount int32 `json:"policyCount"`
	//
	// total number of assertions
	//
	AssertionCount int32 `json:"assertionCount"`
	//
	// total number of assertions containing roles as wildcards
	//
	RoleWildCardCount int32 `json:"roleWildCardCount"`
	//
	// Service names (domain.service) that dont contain trust role if this is a
	// tenant domain. Might be empty or null, if not a tenant or if all providers
	// support this tenant.
	//
	ProvidersWithoutTrust []ServiceName `json:"providersWithoutTrust,omitempty" rdl:"optional"`
	//
	// Names of Tenant domains that dont contain assume role assertions if this is
	// a provider domain. Might be empty or null, if not a provider or if all
	// tenants support use this provider.
	//
	TenantsWithoutAssumeRole []DomainName `json:"tenantsWithoutAssumeRole,omitempty" rdl:"optional"`
}

//
// NewDomainDataCheck - creates an initialized DomainDataCheck instance, returns a pointer to it
//
func NewDomainDataCheck(init ...*DomainDataCheck) *DomainDataCheck {
	if len(init) == 1 {
		return init[0]
	}
	return new(DomainDataCheck)
}

type rawDomainDataCheck DomainDataCheck

//
// UnmarshalJSON is defined for proper JSON decoding of a DomainDataCheck
//
func (self *DomainDataCheck) UnmarshalJSON(b []byte) error {
	var raw rawDomainDataCheck
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*self = DomainDataCheck(raw)
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
// All fields are either optional or primitives with usable zero values, so
// there is nothing to reject here.
//
func (self *DomainDataCheck) Validate() error {
	return nil
}
//
// Entity - An entity is a name and a structured value. some entity
// names/prefixes are reserved (i.e. "role", "policy", "meta", "domain",
// "service")
//
type Entity struct {
	//
	// name of the entity object
	//
	Name EntityName `json:"name"`
	//
	// value of the entity
	//
	Value rdl.Struct `json:"value"`
}

//
// NewEntity - creates an initialized Entity instance, returns a pointer to it
//
func NewEntity(init ...*Entity) *Entity {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(Entity).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *Entity) Init() *Entity {
	if self.Value == nil {
		self.Value = make(rdl.Struct)
	}
	return self
}

type rawEntity Entity

//
// UnmarshalJSON is defined for proper JSON decoding of a Entity
//
func (self *Entity) UnmarshalJSON(b []byte) error {
	var raw rawEntity
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := Entity(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *Entity) Validate() error {
	if self.Name == "" {
		return fmt.Errorf("Entity.name is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "EntityName", self.Name); !val.Valid {
		return fmt.Errorf("Entity.name does not contain a valid EntityName (%v)", val.Error)
	}
	if self.Value == nil {
		return fmt.Errorf("Entity: Missing required field: value")
	}
	return nil
}
//
// EntityList - The representation for an enumeration of entities in the
// namespace
//
type EntityList struct {
	//
	// list of entity names
	//
	Names []EntityName `json:"names"`
}

//
// NewEntityList - creates an initialized EntityList instance, returns a pointer to it
//
func NewEntityList(init ...*EntityList) *EntityList {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(EntityList).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *EntityList) Init() *EntityList {
	if self.Names == nil {
		self.Names = []EntityName{}
	}
	return self
}

type rawEntityList EntityList

//
// UnmarshalJSON is defined for proper JSON decoding of a EntityList
//
func (self *EntityList) UnmarshalJSON(b []byte) error {
	var raw rawEntityList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := EntityList(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *EntityList) Validate() error {
	if self.Names == nil {
		return fmt.Errorf("EntityList: Missing required field: names")
	}
	return nil
}
//
// PolicyList - The representation for an enumeration of policies in the
// namespace, with pagination.
//
type PolicyList struct {
	//
	// list of policy names
	//
	Names []EntityName `json:"names"`
	//
	// if the response is a paginated list, this attribute specifies the value to
	// be used in the next policy list request as the value for the skip query
	// parameter.
	//
	Next string `json:"next,omitempty" rdl:"optional"`
}

//
// NewPolicyList - creates an initialized PolicyList instance, returns a pointer to it
//
func NewPolicyList(init ...*PolicyList) *PolicyList {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(PolicyList).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *PolicyList) Init() *PolicyList {
	if self.Names == nil {
		self.Names = []EntityName{}
	}
	return self
}

type rawPolicyList PolicyList

//
// UnmarshalJSON is defined for proper JSON decoding of a PolicyList
//
func (self *PolicyList) UnmarshalJSON(b []byte) error {
	var raw rawPolicyList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := PolicyList(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *PolicyList) Validate() error {
	if self.Names == nil {
		return fmt.Errorf("PolicyList: Missing required field: names")
	}
	return nil
}
//
// Tenancy - A representation of tenant.
//
type Tenancy struct {
	//
	// the domain that is to get a tenancy
	//
	Domain DomainName `json:"domain"`
	//
	// the provider service on which the tenancy is to reside
	//
	Service ServiceName `json:"service"`
	//
	// registered resource groups for this tenant
	//
	ResourceGroups []EntityName `json:"resourceGroups,omitempty" rdl:"optional"`
}

//
// NewTenancy - creates an initialized Tenancy instance, returns a pointer to it
//
func NewTenancy(init ...*Tenancy) *Tenancy {
	if len(init) == 1 {
		return init[0]
	}
	return new(Tenancy)
}

type rawTenancy Tenancy

//
// UnmarshalJSON is defined for proper JSON decoding of a Tenancy
//
func (self *Tenancy) UnmarshalJSON(b []byte) error {
	var raw rawTenancy
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*self = Tenancy(raw)
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *Tenancy) Validate() error {
	if self.Domain == "" {
		return fmt.Errorf("Tenancy.domain is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Domain); !val.Valid {
		return fmt.Errorf("Tenancy.domain does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Service == "" {
		return fmt.Errorf("Tenancy.service is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "ServiceName", self.Service); !val.Valid {
		return fmt.Errorf("Tenancy.service does not contain a valid ServiceName (%v)", val.Error)
	}
	return nil
}
//
// TenancyResourceGroup -
//
type TenancyResourceGroup struct {
	//
	// the domain that is to get a tenancy
	//
	Domain DomainName `json:"domain"`
	//
	// the provider service on which the tenancy is to reside
	//
	Service ServiceName `json:"service"`
	//
	// registered resource group for this tenant
	//
	ResourceGroup EntityName `json:"resourceGroup"`
}

//
// NewTenancyResourceGroup - creates an initialized TenancyResourceGroup instance, returns a pointer to it
//
func NewTenancyResourceGroup(init ...*TenancyResourceGroup) *TenancyResourceGroup {
	if len(init) == 1 {
		return init[0]
	}
	return new(TenancyResourceGroup)
}

type rawTenancyResourceGroup TenancyResourceGroup

//
// UnmarshalJSON is defined for proper JSON decoding of a TenancyResourceGroup
//
func (self *TenancyResourceGroup) UnmarshalJSON(b []byte) error {
	var raw rawTenancyResourceGroup
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*self = TenancyResourceGroup(raw)
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *TenancyResourceGroup) Validate() error {
	if self.Domain == "" {
		return fmt.Errorf("TenancyResourceGroup.domain is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Domain); !val.Valid {
		return fmt.Errorf("TenancyResourceGroup.domain does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Service == "" {
		return fmt.Errorf("TenancyResourceGroup.service is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "ServiceName", self.Service); !val.Valid {
		return fmt.Errorf("TenancyResourceGroup.service does not contain a valid ServiceName (%v)", val.Error)
	}
	if self.ResourceGroup == "" {
		return fmt.Errorf("TenancyResourceGroup.resourceGroup is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "EntityName", self.ResourceGroup); !val.Valid {
		return fmt.Errorf("TenancyResourceGroup.resourceGroup does not contain a valid EntityName (%v)", val.Error)
	}
	return nil
}
//
// TenantRoleAction - A representation of tenant role action.
//
type TenantRoleAction struct {
	//
	// name of the role
	//
	Role SimpleName `json:"role"`
	//
	// action value for the generated policy assertion
	//
	Action string `json:"action"`
}

//
// NewTenantRoleAction - creates an initialized TenantRoleAction instance, returns a pointer to it
//
func NewTenantRoleAction(init ...*TenantRoleAction) *TenantRoleAction {
	if len(init) == 1 {
		return init[0]
	}
	return new(TenantRoleAction)
}

type rawTenantRoleAction TenantRoleAction

//
// UnmarshalJSON is defined for proper JSON decoding of a TenantRoleAction
//
func (self *TenantRoleAction) UnmarshalJSON(b []byte) error {
	var raw rawTenantRoleAction
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*self = TenantRoleAction(raw)
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *TenantRoleAction) Validate() error {
	if self.Role == "" {
		return fmt.Errorf("TenantRoleAction.role is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "SimpleName", self.Role); !val.Valid {
		return fmt.Errorf("TenantRoleAction.role does not contain a valid SimpleName (%v)", val.Error)
	}
	if self.Action == "" {
		return fmt.Errorf("TenantRoleAction.action is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "String", self.Action); !val.Valid {
		return fmt.Errorf("TenantRoleAction.action does not contain a valid String (%v)", val.Error)
	}
	return nil
}
//
// TenantRoles - A representation of tenant roles to be provisioned.
//
type TenantRoles struct {
	//
	// name of the provider domain
	//
	Domain DomainName `json:"domain"`
	//
	// name of the provider service
	//
	Service SimpleName `json:"service"`
	//
	// name of the tenant domain
	//
	Tenant DomainName `json:"tenant"`
	//
	// the role/action pairs to provision
	//
	Roles []*TenantRoleAction `json:"roles"`
}

//
// NewTenantRoles - creates an initialized TenantRoles instance, returns a pointer to it
//
func NewTenantRoles(init ...*TenantRoles) *TenantRoles {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(TenantRoles).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *TenantRoles) Init() *TenantRoles {
	if self.Roles == nil {
		self.Roles = []*TenantRoleAction{}
	}
	return self
}

type rawTenantRoles TenantRoles

//
// UnmarshalJSON is defined for proper JSON decoding of a TenantRoles
//
func (self *TenantRoles) UnmarshalJSON(b []byte) error {
	var raw rawTenantRoles
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := TenantRoles(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *TenantRoles) Validate() error {
	if self.Domain == "" {
		return fmt.Errorf("TenantRoles.domain is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Domain); !val.Valid {
		return fmt.Errorf("TenantRoles.domain does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Service == "" {
		return fmt.Errorf("TenantRoles.service is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "SimpleName", self.Service); !val.Valid {
		return fmt.Errorf("TenantRoles.service does not contain a valid SimpleName (%v)", val.Error)
	}
	if self.Tenant == "" {
		return fmt.Errorf("TenantRoles.tenant is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Tenant); !val.Valid {
		return fmt.Errorf("TenantRoles.tenant does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Roles == nil {
		return fmt.Errorf("TenantRoles: Missing required field: roles")
	}
	return nil
}
//
// TenantResourceGroupRoles - A representation of tenant roles for resource
// groups to be provisioned.
//
type TenantResourceGroupRoles struct {
	//
	// name of the provider domain
	//
	Domain DomainName `json:"domain"`
	//
	// name of the provider service
	//
	Service SimpleName `json:"service"`
	//
	// name of the tenant domain
	//
	Tenant DomainName `json:"tenant"`
	//
	// the role/action pairs to provision
	//
	Roles []*TenantRoleAction `json:"roles"`
	//
	// tenant resource group
	//
	ResourceGroup EntityName `json:"resourceGroup"`
}

//
// NewTenantResourceGroupRoles - creates an initialized TenantResourceGroupRoles instance, returns a pointer to it
//
func NewTenantResourceGroupRoles(init ...*TenantResourceGroupRoles) *TenantResourceGroupRoles {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(TenantResourceGroupRoles).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *TenantResourceGroupRoles) Init() *TenantResourceGroupRoles {
	if self.Roles == nil {
		self.Roles = []*TenantRoleAction{}
	}
	return self
}

type rawTenantResourceGroupRoles TenantResourceGroupRoles

//
// UnmarshalJSON is defined for proper JSON decoding of a TenantResourceGroupRoles
//
func (self *TenantResourceGroupRoles) UnmarshalJSON(b []byte) error {
	var raw rawTenantResourceGroupRoles
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := TenantResourceGroupRoles(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *TenantResourceGroupRoles) Validate() error {
	if self.Domain == "" {
		return fmt.Errorf("TenantResourceGroupRoles.domain is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Domain); !val.Valid {
		return fmt.Errorf("TenantResourceGroupRoles.domain does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Service == "" {
		return fmt.Errorf("TenantResourceGroupRoles.service is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "SimpleName", self.Service); !val.Valid {
		return fmt.Errorf("TenantResourceGroupRoles.service does not contain a valid SimpleName (%v)", val.Error)
	}
	if self.Tenant == "" {
		return fmt.Errorf("TenantResourceGroupRoles.tenant is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Tenant); !val.Valid {
		return fmt.Errorf("TenantResourceGroupRoles.tenant does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Roles == nil {
		return fmt.Errorf("TenantResourceGroupRoles: Missing required field: roles")
	}
	if self.ResourceGroup == "" {
		return fmt.Errorf("TenantResourceGroupRoles.resourceGroup is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "EntityName", self.ResourceGroup); !val.Valid {
		return fmt.Errorf("TenantResourceGroupRoles.resourceGroup does not contain a valid EntityName (%v)", val.Error)
	}
	return nil
}
//
// ProviderResourceGroupRoles - A representation of provider roles to be
// provisioned.
//
type ProviderResourceGroupRoles struct {
	//
	// name of the provider domain
	//
	Domain DomainName `json:"domain"`
	//
	// name of the provider service
	//
	Service SimpleName `json:"service"`
	//
	// name of the tenant domain
	//
	Tenant DomainName `json:"tenant"`
	//
	// the role/action pairs to provision
	//
	Roles []*TenantRoleAction `json:"roles"`
	//
	// tenant resource group
	//
	ResourceGroup EntityName `json:"resourceGroup"`
}

//
// NewProviderResourceGroupRoles - creates an initialized ProviderResourceGroupRoles instance, returns a pointer to it
//
func NewProviderResourceGroupRoles(init ...*ProviderResourceGroupRoles) *ProviderResourceGroupRoles {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(ProviderResourceGroupRoles).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *ProviderResourceGroupRoles) Init() *ProviderResourceGroupRoles {
	if self.Roles == nil {
		self.Roles = []*TenantRoleAction{}
	}
	return self
}

type rawProviderResourceGroupRoles ProviderResourceGroupRoles

//
// UnmarshalJSON is defined for proper JSON decoding of a ProviderResourceGroupRoles
//
func (self *ProviderResourceGroupRoles) UnmarshalJSON(b []byte) error {
	var raw rawProviderResourceGroupRoles
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := ProviderResourceGroupRoles(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *ProviderResourceGroupRoles) Validate() error {
	if self.Domain == "" {
		return fmt.Errorf("ProviderResourceGroupRoles.domain is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Domain); !val.Valid {
		return fmt.Errorf("ProviderResourceGroupRoles.domain does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Service == "" {
		return fmt.Errorf("ProviderResourceGroupRoles.service is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "SimpleName", self.Service); !val.Valid {
		return fmt.Errorf("ProviderResourceGroupRoles.service does not contain a valid SimpleName (%v)", val.Error)
	}
	if self.Tenant == "" {
		return fmt.Errorf("ProviderResourceGroupRoles.tenant is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Tenant); !val.Valid {
		return fmt.Errorf("ProviderResourceGroupRoles.tenant does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Roles == nil {
		return fmt.Errorf("ProviderResourceGroupRoles: Missing required field: roles")
	}
	if self.ResourceGroup == "" {
		return fmt.Errorf("ProviderResourceGroupRoles.resourceGroup is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "EntityName", self.ResourceGroup); !val.Valid {
		return fmt.Errorf("ProviderResourceGroupRoles.resourceGroup does not contain a valid EntityName (%v)", val.Error)
	}
	return nil
}
//
// Access - Access can be checked and returned as this resource.
//
type Access struct {
	//
	// true (allowed) or false (denied)
	//
	Granted bool `json:"granted"`
}

//
// NewAccess - creates an initialized Access instance, returns a pointer to it
//
func NewAccess(init ...*Access) *Access {
	if len(init) == 1 {
		return init[0]
	}
	return new(Access)
}

type rawAccess Access

//
// UnmarshalJSON is defined for proper JSON decoding of a Access
//
func (self *Access) UnmarshalJSON(b []byte) error {
	var raw rawAccess
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*self = Access(raw)
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
// Granted is a plain bool whose zero value is meaningful, so there is
// nothing to reject here.
//
func (self *Access) Validate() error {
	return nil
}
//
// ResourceAccess -
//
type ResourceAccess struct {
	Principal EntityName `json:"principal"`
	Assertions []*Assertion `json:"assertions"`
}

//
// NewResourceAccess - creates an initialized ResourceAccess instance, returns a pointer to it
//
func NewResourceAccess(init ...*ResourceAccess) *ResourceAccess {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(ResourceAccess).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *ResourceAccess) Init() *ResourceAccess {
	if self.Assertions == nil {
		self.Assertions = []*Assertion{}
	}
	return self
}

type rawResourceAccess ResourceAccess

//
// UnmarshalJSON is defined for proper JSON decoding of a ResourceAccess
//
func (self *ResourceAccess) UnmarshalJSON(b []byte) error {
	var raw rawResourceAccess
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := ResourceAccess(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *ResourceAccess) Validate() error {
	if self.Principal == "" {
		return fmt.Errorf("ResourceAccess.principal is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "EntityName", self.Principal); !val.Valid {
		return fmt.Errorf("ResourceAccess.principal does not contain a valid EntityName (%v)", val.Error)
	}
	if self.Assertions == nil {
		return fmt.Errorf("ResourceAccess: Missing required field: assertions")
	}
	return nil
}
//
// ResourceAccessList -
//
type ResourceAccessList struct {
	Resources []*ResourceAccess `json:"resources"`
}

//
// NewResourceAccessList - creates an initialized ResourceAccessList instance, returns a pointer to it
//
func NewResourceAccessList(init ...*ResourceAccessList) *ResourceAccessList {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(ResourceAccessList).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *ResourceAccessList) Init() *ResourceAccessList {
	if self.Resources == nil {
		self.Resources = []*ResourceAccess{}
	}
	return self
}

type rawResourceAccessList ResourceAccessList

//
// UnmarshalJSON is defined for proper JSON decoding of a ResourceAccessList
//
func (self *ResourceAccessList) UnmarshalJSON(b []byte) error {
	var raw rawResourceAccessList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := ResourceAccessList(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *ResourceAccessList) Validate() error {
	if self.Resources == nil {
		return fmt.Errorf("ResourceAccessList: Missing required field: resources")
	}
	return nil
}
//
// DomainModified - Tuple of domain-name and modification time-stamps. This
// object is returned when the caller has requested list of domains modified
// since a specific timestamp.
//
type DomainModified struct {
	//
	// name of the domain
	//
	Name DomainName `json:"name"`
	//
	// last modified timestamp of the domain
	//
	Modified int64 `json:"modified"`
}

//
// NewDomainModified - creates an initialized DomainModified instance, returns a pointer to it
//
func NewDomainModified(init ...*DomainModified) *DomainModified {
	if len(init) == 1 {
		return init[0]
	}
	return new(DomainModified)
}

type rawDomainModified DomainModified

//
// UnmarshalJSON is defined for proper JSON decoding of a DomainModified
//
func (self *DomainModified) UnmarshalJSON(b []byte) error {
	var raw rawDomainModified
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	*self = DomainModified(raw)
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *DomainModified) Validate() error {
	if self.Name == "" {
		return fmt.Errorf("DomainModified.name is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Name); !val.Valid {
		return fmt.Errorf("DomainModified.name does not contain a valid DomainName (%v)", val.Error)
	}
	return nil
}
//
// DomainModifiedList - A list of {domain, modified-timestamp} tuples.
//
type DomainModifiedList struct {
	//
	// list of modified domains
	//
	NameModList []*DomainModified `json:"nameModList"`
}

//
// NewDomainModifiedList - creates an initialized DomainModifiedList instance, returns a pointer to it
//
func NewDomainModifiedList(init ...*DomainModifiedList) *DomainModifiedList {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(DomainModifiedList).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *DomainModifiedList) Init() *DomainModifiedList {
	if self.NameModList == nil {
		self.NameModList = []*DomainModified{}
	}
	return self
}

type rawDomainModifiedList DomainModifiedList

//
// UnmarshalJSON is defined for proper JSON decoding of a DomainModifiedList
//
func (self *DomainModifiedList) UnmarshalJSON(b []byte) error {
	var raw rawDomainModifiedList
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := DomainModifiedList(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *DomainModifiedList) Validate() error {
	if self.NameModList == nil {
		return fmt.Errorf("DomainModifiedList: Missing required field: nameModList")
	}
	return nil
}
//
// DomainPolicies - We need to include the name of the domain in this struct
// since this data will be passed back to ZPU through ZTS so we need to sign not
// only the list of policies but also the corresponding domain name that the
// policies belong to.
//
type DomainPolicies struct {
	//
	// name of the domain
	//
	Domain DomainName `json:"domain"`
	//
	// list of policies defined in this server
	//
	Policies []*Policy `json:"policies"`
}

//
// NewDomainPolicies - creates an initialized DomainPolicies instance, returns a pointer to it
//
func NewDomainPolicies(init ...*DomainPolicies) *DomainPolicies {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(DomainPolicies).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *DomainPolicies) Init() *DomainPolicies {
	if self.Policies == nil {
		self.Policies = []*Policy{}
	}
	return self
}

type rawDomainPolicies DomainPolicies

//
// UnmarshalJSON is defined for proper JSON decoding of a DomainPolicies
//
func (self *DomainPolicies) UnmarshalJSON(b []byte) error {
	var raw rawDomainPolicies
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := DomainPolicies(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *DomainPolicies) Validate() error {
	if self.Domain == "" {
		return fmt.Errorf("DomainPolicies.domain is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "DomainName", self.Domain); !val.Valid {
		return fmt.Errorf("DomainPolicies.domain does not contain a valid DomainName (%v)", val.Error)
	}
	if self.Policies == nil {
		return fmt.Errorf("DomainPolicies: Missing required field: policies")
	}
	return nil
}
//
// SignedPolicies - A signed bulk transfer of policies. The data is signed with
// server's private key.
//
type SignedPolicies struct {
	//
	// list of policies defined in a domain
	//
	Contents *DomainPolicies `json:"contents"`
	//
	// signature generated based on the domain policies object
	//
	Signature string `json:"signature"`
	//
	// the identifier of the key used to generate the signature
	//
	KeyId string `json:"keyId"`
}

//
// NewSignedPolicies - creates an initialized SignedPolicies instance, returns a pointer to it
//
func NewSignedPolicies(init ...*SignedPolicies) *SignedPolicies {
	if len(init) == 1 {
		return init[0].Init()
	}
	return new(SignedPolicies).Init()
}

//
// Init - sets up the instance according to its default field values, if any
//
func (self *SignedPolicies) Init() *SignedPolicies {
	if self.Contents == nil {
		self.Contents = NewDomainPolicies()
	}
	return self
}

type rawSignedPolicies SignedPolicies

//
// UnmarshalJSON is defined for proper JSON decoding of a SignedPolicies
//
func (self *SignedPolicies) UnmarshalJSON(b []byte) error {
	var raw rawSignedPolicies
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	decoded := SignedPolicies(raw)
	*self = *decoded.Init()
	return self.Validate()
}

//
// Validate - checks for missing required fields, etc
//
func (self *SignedPolicies) Validate() error {
	if self.Contents == nil {
		return fmt.Errorf("SignedPolicies: Missing required field: contents")
	}
	if self.Signature == "" {
		return fmt.Errorf("SignedPolicies.signature is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "String", self.Signature); !val.Valid {
		return fmt.Errorf("SignedPolicies.signature does not contain a valid String (%v)", val.Error)
	}
	if self.KeyId == "" {
		return fmt.Errorf("SignedPolicies.keyId is missing but is a required field")
	}
	if val := rdl.Validate(ZMSSchema(), "String", self.KeyId); !val.Valid {
		return fmt.Errorf("SignedPolicies.keyId does not contain a valid String (%v)", val.Error)
	}
	return nil
}
//
// DomainData - A domain object that includes its roles, policies and services.
//
type DomainData struct {
	//
	// name of the domain
	//
	Name DomainName `json:"name"`
	//
	// associated cloud (i.e. aws) account id
	//
	Account string `json:"account,omitempty" rdl:"optional"`
	//
	// associated product id
	//
	YpmId *int32 `json:"ypmId,omitempty" rdl:"optional"`
	//
	// domain enabled state
	//
	Enabled *bool `json:"enabled,omitempty" rdl:"optional"`
	//
	// list of roles in the domain
	//
	Roles []*Role `json:"roles"`
	//
	// list of policies in the domain signed with ZMS private key
	//
	Policies *SignedPolicies `json:"policies"`
	//
	// list of services in the domain
	//
	Services []*ServiceIdentity `json:"services"`
	//
	// list of entities in the domain
	//
	Entities []*Entity `json:"entities"`
	//
	// last modification timestamp
	//
	Modified rdl.Timestamp `json:"modified"`
	//
	// associated application id
	//
	ApplicationId string `json:"applicationId,omitempty" rdl:"optional"`
}
//
// NewDomainData - creates an initialized DomainData instance, returns a pointer to it
//
func NewDomainData(init ...*DomainData) *DomainData {
	var o *DomainData
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(DomainData)
	}
	return o.Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *DomainData) Init() *DomainData {
	// Default the required slices/pointer so they marshal as []/{} rather
	// than null and pass the nil checks in Validate.
	if self.Roles == nil {
		self.Roles = make([]*Role, 0)
	}
	if self.Policies == nil {
		self.Policies = NewSignedPolicies()
	}
	if self.Services == nil {
		self.Services = make([]*ServiceIdentity, 0)
	}
	if self.Entities == nil {
		self.Entities = make([]*Entity, 0)
	}
	return self
}
// rawDomainData is a method-free alias of DomainData used by UnmarshalJSON
// to avoid recursive decoding.
type rawDomainData DomainData
//
// UnmarshalJSON is defined for proper JSON decoding of a DomainData
//
func (self *DomainData) UnmarshalJSON(b []byte) error {
	var m rawDomainData
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := DomainData(m)
		// Apply defaults, then validate the fully initialized value.
		*self = *((&o).Init())
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *DomainData) Validate() error {
	if self.Name == "" {
		return fmt.Errorf("DomainData.name is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "DomainName", self.Name)
		if !val.Valid {
			return fmt.Errorf("DomainData.name does not contain a valid DomainName (%v)", val.Error)
		}
	}
	if self.Roles == nil {
		return fmt.Errorf("DomainData: Missing required field: roles")
	}
	if self.Policies == nil {
		return fmt.Errorf("DomainData: Missing required field: policies")
	}
	if self.Services == nil {
		return fmt.Errorf("DomainData: Missing required field: services")
	}
	if self.Entities == nil {
		return fmt.Errorf("DomainData: Missing required field: entities")
	}
	// A timestamp has no "missing" sentinel other than its zero value.
	if self.Modified.IsZero() {
		return fmt.Errorf("DomainData: Missing required field: modified")
	}
	return nil
}
//
// SignedDomain - A domain object signed with server's private key
//
type SignedDomain struct {
	//
	// domain object with its roles, policies and services
	//
	Domain *DomainData `json:"domain"`
	//
	// signature generated based on the domain object
	//
	Signature string `json:"signature"`
	//
	// the identifier of the key used to generate the signature
	//
	KeyId string `json:"keyId"`
}
//
// NewSignedDomain - creates an initialized SignedDomain instance, returns a pointer to it
//
func NewSignedDomain(init ...*SignedDomain) *SignedDomain {
	var o *SignedDomain
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(SignedDomain)
	}
	return o.Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *SignedDomain) Init() *SignedDomain {
	// Default the nested domain data so callers never see a nil pointer.
	if self.Domain == nil {
		self.Domain = NewDomainData()
	}
	return self
}
// rawSignedDomain is a method-free alias of SignedDomain used by
// UnmarshalJSON to avoid recursive decoding.
type rawSignedDomain SignedDomain
//
// UnmarshalJSON is defined for proper JSON decoding of a SignedDomain
//
func (self *SignedDomain) UnmarshalJSON(b []byte) error {
	var m rawSignedDomain
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := SignedDomain(m)
		// Apply defaults, then validate the fully initialized value.
		*self = *((&o).Init())
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *SignedDomain) Validate() error {
	if self.Domain == nil {
		return fmt.Errorf("SignedDomain: Missing required field: domain")
	}
	if self.Signature == "" {
		return fmt.Errorf("SignedDomain.signature is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "String", self.Signature)
		if !val.Valid {
			return fmt.Errorf("SignedDomain.signature does not contain a valid String (%v)", val.Error)
		}
	}
	if self.KeyId == "" {
		return fmt.Errorf("SignedDomain.keyId is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "String", self.KeyId)
		if !val.Valid {
			return fmt.Errorf("SignedDomain.keyId does not contain a valid String (%v)", val.Error)
		}
	}
	return nil
}
//
// SignedDomains - A list of signed domain objects
//
type SignedDomains struct {
	// the signed domain objects in this bulk transfer
	Domains []*SignedDomain `json:"domains"`
}
//
// NewSignedDomains - creates an initialized SignedDomains instance, returns a pointer to it
//
func NewSignedDomains(init ...*SignedDomains) *SignedDomains {
	var o *SignedDomains
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(SignedDomains)
	}
	return o.Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *SignedDomains) Init() *SignedDomains {
	// Default to an empty, non-nil slice so the field marshals as [].
	if self.Domains == nil {
		self.Domains = make([]*SignedDomain, 0)
	}
	return self
}
// rawSignedDomains is a method-free alias of SignedDomains used by
// UnmarshalJSON to avoid recursive decoding.
type rawSignedDomains SignedDomains
//
// UnmarshalJSON is defined for proper JSON decoding of a SignedDomains
//
func (self *SignedDomains) UnmarshalJSON(b []byte) error {
	var m rawSignedDomains
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := SignedDomains(m)
		// Apply defaults, then validate the fully initialized value.
		*self = *((&o).Init())
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *SignedDomains) Validate() error {
	// A nil slice means the struct was built directly without
	// Init/UnmarshalJSON; Init would have defaulted it to [].
	if self.Domains == nil {
		return fmt.Errorf("SignedDomains: Missing required field: domains")
	}
	return nil
}
//
// UserToken - A user token generated based on user's credentials
//
type UserToken struct {
	//
	// Signed user token identifying a specific authenticated user
	//
	Token SignedToken `json:"token"`
	//
	// Authorization header name for the token
	//
	Header string `json:"header,omitempty" rdl:"optional"`
}
//
// NewUserToken - creates an initialized UserToken instance, returns a pointer to it
//
func NewUserToken(init ...*UserToken) *UserToken {
	var o *UserToken
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(UserToken)
	}
	// No Init step: UserToken declares no fields that need defaulting.
	return o
}
// rawUserToken is a method-free alias of UserToken used by UnmarshalJSON to
// avoid recursive decoding.
type rawUserToken UserToken
//
// UnmarshalJSON is defined for proper JSON decoding of a UserToken
//
func (self *UserToken) UnmarshalJSON(b []byte) error {
	var m rawUserToken
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := UserToken(m)
		*self = o
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *UserToken) Validate() error {
	if self.Token == "" {
		return fmt.Errorf("UserToken.token is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "SignedToken", self.Token)
		if !val.Valid {
			return fmt.Errorf("UserToken.token does not contain a valid SignedToken (%v)", val.Error)
		}
	}
	return nil
}
//
// ServicePrincipal - A service principal object identifying a given service.
//
type ServicePrincipal struct {
	//
	// name of the domain
	//
	Domain DomainName `json:"domain"`
	//
	// name of the service
	//
	Service EntityName `json:"service"`
	//
	// service's signed token
	//
	Token SignedToken `json:"token"`
}
//
// NewServicePrincipal - creates an initialized ServicePrincipal instance, returns a pointer to it
//
func NewServicePrincipal(init ...*ServicePrincipal) *ServicePrincipal {
	var o *ServicePrincipal
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(ServicePrincipal)
	}
	// No Init step: ServicePrincipal declares no fields that need defaulting.
	return o
}
// rawServicePrincipal is a method-free alias of ServicePrincipal used by
// UnmarshalJSON to avoid recursive decoding.
type rawServicePrincipal ServicePrincipal
//
// UnmarshalJSON is defined for proper JSON decoding of a ServicePrincipal
//
func (self *ServicePrincipal) UnmarshalJSON(b []byte) error {
	var m rawServicePrincipal
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := ServicePrincipal(m)
		*self = o
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *ServicePrincipal) Validate() error {
	// Each required string field must be non-empty and must satisfy its
	// declared schema type.
	if self.Domain == "" {
		return fmt.Errorf("ServicePrincipal.domain is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "DomainName", self.Domain)
		if !val.Valid {
			return fmt.Errorf("ServicePrincipal.domain does not contain a valid DomainName (%v)", val.Error)
		}
	}
	if self.Service == "" {
		return fmt.Errorf("ServicePrincipal.service is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "EntityName", self.Service)
		if !val.Valid {
			return fmt.Errorf("ServicePrincipal.service does not contain a valid EntityName (%v)", val.Error)
		}
	}
	if self.Token == "" {
		return fmt.Errorf("ServicePrincipal.token is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "SignedToken", self.Token)
		if !val.Valid {
			return fmt.Errorf("ServicePrincipal.token does not contain a valid SignedToken (%v)", val.Error)
		}
	}
	return nil
}
//
// User - The representation for a user
//
type User struct {
	//
	// name of the user
	//
	Name SimpleName `json:"name"`
}
//
// NewUser - creates an initialized User instance, returns a pointer to it
//
func NewUser(init ...*User) *User {
	var o *User
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(User)
	}
	// No Init step: User declares no fields that need defaulting.
	return o
}
// rawUser is a method-free alias of User used by UnmarshalJSON to avoid
// recursive decoding.
type rawUser User
//
// UnmarshalJSON is defined for proper JSON decoding of a User
//
func (self *User) UnmarshalJSON(b []byte) error {
	var m rawUser
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := User(m)
		*self = o
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *User) Validate() error {
	if self.Name == "" {
		return fmt.Errorf("User.name is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "SimpleName", self.Name)
		if !val.Valid {
			return fmt.Errorf("User.name does not contain a valid SimpleName (%v)", val.Error)
		}
	}
	return nil
}
//
// UserList - a list of user names
//
type UserList struct {
	//
	// list of user names
	//
	Names []SimpleName `json:"names"`
}
//
// NewUserList - creates an initialized UserList instance, returns a pointer to it
//
func NewUserList(init ...*UserList) *UserList {
	var o *UserList
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(UserList)
	}
	return o.Init()
}
//
// Init - sets up the instance according to its default field values, if any
//
func (self *UserList) Init() *UserList {
	// Default to an empty, non-nil slice so the field marshals as [].
	if self.Names == nil {
		self.Names = make([]SimpleName, 0)
	}
	return self
}
// rawUserList is a method-free alias of UserList used by UnmarshalJSON to
// avoid recursive decoding.
type rawUserList UserList
//
// UnmarshalJSON is defined for proper JSON decoding of a UserList
//
func (self *UserList) UnmarshalJSON(b []byte) error {
	var m rawUserList
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := UserList(m)
		// Apply defaults, then validate the fully initialized value.
		*self = *((&o).Init())
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *UserList) Validate() error {
	if self.Names == nil {
		return fmt.Errorf("UserList: Missing required field: names")
	}
	return nil
}
//
// Quota - The representation for a quota object
//
type Quota struct {
	//
	// name of the domain object
	//
	Name DomainName `json:"name"`
	//
	// number of subdomains allowed (applied at top level domain level)
	//
	Subdomain int32 `json:"subdomain"`
	//
	// number of roles allowed
	//
	Role int32 `json:"role"`
	//
	// number of members a role may have
	//
	RoleMember int32 `json:"roleMember"`
	//
	// number of policies allowed
	//
	Policy int32 `json:"policy"`
	//
	// total number of assertions a policy may have
	//
	Assertion int32 `json:"assertion"`
	//
	// total number of entity objects
	//
	Entity int32 `json:"entity"`
	//
	// number of services allowed
	//
	Service int32 `json:"service"`
	//
	// number of hosts allowed per service
	//
	ServiceHost int32 `json:"serviceHost"`
	//
	// number of public keys per service
	//
	PublicKey int32 `json:"publicKey"`
	//
	// the last modification timestamp of the quota object
	//
	Modified *rdl.Timestamp `json:"modified,omitempty" rdl:"optional"`
}
//
// NewQuota - creates an initialized Quota instance, returns a pointer to it
//
func NewQuota(init ...*Quota) *Quota {
	var o *Quota
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(Quota)
	}
	// No Init step: Quota declares no fields that need defaulting.
	return o
}
// rawQuota is a method-free alias of Quota used by UnmarshalJSON to avoid
// recursive decoding.
type rawQuota Quota
//
// UnmarshalJSON is defined for proper JSON decoding of a Quota
//
func (self *Quota) UnmarshalJSON(b []byte) error {
	var m rawQuota
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := Quota(m)
		*self = o
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *Quota) Validate() error {
	// Note: only Name is validated here; the int32 counters cannot be
	// distinguished from a deliberately supplied zero.
	if self.Name == "" {
		return fmt.Errorf("Quota.name is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "DomainName", self.Name)
		if !val.Valid {
			return fmt.Errorf("Quota.name does not contain a valid DomainName (%v)", val.Error)
		}
	}
	return nil
}
//
// Status - The representation for a status object
//
type Status struct {
	//
	// status message code
	//
	Code int32 `json:"code"`
	//
	// status message of the server
	//
	Message string `json:"message"`
}
//
// NewStatus - creates an initialized Status instance, returns a pointer to it
//
func NewStatus(init ...*Status) *Status {
	var o *Status
	if len(init) == 1 {
		o = init[0]
	} else {
		o = new(Status)
	}
	// No Init step: Status declares no fields that need defaulting.
	return o
}
// rawStatus is a method-free alias of Status used by UnmarshalJSON to avoid
// recursive decoding.
type rawStatus Status
//
// UnmarshalJSON is defined for proper JSON decoding of a Status
//
func (self *Status) UnmarshalJSON(b []byte) error {
	var m rawStatus
	err := json.Unmarshal(b, &m)
	if err == nil {
		o := Status(m)
		*self = o
		err = self.Validate()
	}
	return err
}
//
// Validate - checks for missing required fields, etc
//
func (self *Status) Validate() error {
	// Note: Code is not validated; an int32 zero value cannot be told
	// apart from an explicit 0.
	if self.Message == "" {
		return fmt.Errorf("Status.message is missing but is a required field")
	} else {
		val := rdl.Validate(ZMSSchema(), "String", self.Message)
		if !val.Valid {
			return fmt.Errorf("Status.message does not contain a valid String (%v)", val.Error)
		}
	}
	return nil
}
| {
"content_hash": "3bb52bbfc5659edb35fcbff946a83982",
"timestamp": "",
"source": "github",
"line_count": 4296,
"max_line_length": 118,
"avg_line_length": 22.25372439478585,
"alnum_prop": 0.6878935587121608,
"repo_name": "tatyano/athenz",
"id": "574fa8019afc19f4aee0afd947743ce7c60f3fc8",
"size": "95645",
"binary": false,
"copies": "1",
"ref": "refs/heads/tatyano",
"path": "clients/go/zms/model.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "42737"
},
{
"name": "Go",
"bytes": "716990"
},
{
"name": "HTML",
"bytes": "32049"
},
{
"name": "Java",
"bytes": "5109892"
},
{
"name": "JavaScript",
"bytes": "488664"
},
{
"name": "Makefile",
"bytes": "32516"
},
{
"name": "Perl",
"bytes": "951"
},
{
"name": "Shell",
"bytes": "41610"
}
],
"symlink_target": ""
} |
# Declarations in Program or Function Body (no-inner-declarations)
In JavaScript, prior to ES6, a function declaration is only allowed in the first level of a program or the body of another function, though parsers sometimes [erroneously accept them elsewhere](https://code.google.com/p/esprima/issues/detail?id=422). This only applies to function declarations; named or anonymous function expressions can occur anywhere an expression is permitted.
```js
// Good
function doSomething() { }
// Bad
if (test) {
function doSomethingElse () { }
}
function anotherThing() {
var fn;
if (test) {
// Good
fn = function expression() { };
// Bad
function declaration() { }
}
}
```
A variable declaration is permitted anywhere a statement can go, even nested deeply inside other blocks. This is often undesirable due to variable hoisting, and moving declarations to the root of the program or function body can increase clarity. Note that [block bindings](https://leanpub.com/understandinges6/read#leanpub-auto-block-bindings) (`let`, `const`) are not hoisted and therefore they are not affected by this rule.
```js
// Good
var foo = 42;
// Good
if (foo) {
let bar1;
}
// Bad
while (test) {
var bar2;
}
function doSomething() {
// Good
var baz = true;
// Bad
if (baz) {
var quux;
}
}
```
## Rule Details
This rule requires that function declarations and, optionally, variable declarations be in the root of a program or the body of a function.
### Options
This rule takes a single option to specify whether it should check just function declarations or both function and variable declarations. The default is `"functions"`. Setting it to `"both"` will apply the same rules to both types of declarations.
You can set the option in configuration like this:
```json
"no-inner-declarations": [2, "both"]
```
The following patterns are considered problems:
```js
/*eslint no-inner-declarations: 2*/
if (test) {
function doSomething() { } /*error Move function declaration to program root.*/
}
function doSomethingElse() {
if (test) {
function doAnotherThing() { } /*error Move function declaration to function body root.*/
}
}
```
With "both" option to check variable declarations, the following are considered problems:
```js
/*eslint no-inner-declarations: [2, "both"]*/
if (test) {
var foo = 42; /*error Move variable declaration to program root.*/
}
function doAnotherThing() {
if (test) {
var bar = 81; /*error Move variable declaration to function body root.*/
}
}
```
The following patterns are considered valid:
```js
/*eslint no-inner-declarations: 2*/
function doSomething() { }
function doSomethingElse() {
function doAnotherThing() { }
}
if (test) {
asyncCall(id, function (err, data) { });
}
var fn;
if (test) {
fn = function fnExpression() { };
}
var bar = 42;
if (test) {
let baz = 43;
}
function doAnotherThing() {
var baz = 81;
}
```
## When Not To Use It
The function declaration portion of this rule will be rendered obsolete when [block-scoped functions](https://bugzilla.mozilla.org/show_bug.cgi?id=585536) land in ES6, but until then, it should be left on to enforce valid constructions. Disable checking variable declarations when using [block-scoped-var](block-scoped-var.md) or if declaring variables in nested blocks is acceptable despite hoisting.
| {
"content_hash": "b0b9f7ad524d5aa9a9f6f9939840d79b",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 427,
"avg_line_length": 25.80597014925373,
"alnum_prop": 0.6934644303065356,
"repo_name": "winterbe/eslint",
"id": "627a8030a01a2a37be968ea28e12b4e1093341ee",
"size": "3458",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "docs/rules/no-inner-declarations.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5313"
},
{
"name": "JavaScript",
"bytes": "3757057"
},
{
"name": "Shell",
"bytes": "928"
}
],
"symlink_target": ""
} |
package org.ayo.sample.menu.notify;
import android.app.Application;
import android.widget.Toast;
/**
 * Toast helper that reuses a single {@link Toast} instance, so successive
 * messages replace each other immediately instead of queuing up and
 * lagging behind user actions ("only ever shows one toast, never a
 * delayed toast").
 *
 * <p>{@link #init(Application)} must be called (typically from
 * {@code Application#onCreate()}) before any toast is shown; otherwise
 * {@code Toast.makeText} will be handed a null context.</p>
 *
 * @author pengjun
 */
public class ToasterDebug {

    /** Application context used to build the toast; set once via {@link #init}. */
    private static Application app;

    public static void init(Application a){
        app = a;
    }

    /**
     * The single shared toast instance, created lazily on first use.
     */
    private static Toast mToast = null;

    private ToasterDebug() {
        // Static utility class; no instances.
    }

    public static void toastLong(final String tip){
        showToast(tip, Toast.LENGTH_LONG);
    }

    public static void toastShort(final String tip){
        showToast(tip, Toast.LENGTH_SHORT);
    }

    private static void showToast(final int stringid, final int lastTime) {
        if (mToast == null) {
            mToast = Toast.makeText(app, stringid, lastTime);
        }
        // Bug fix: previously the duration was only applied when the toast
        // was first created, so e.g. a toastLong() issued after an earlier
        // toastShort() still displayed for the short duration. Apply the
        // requested duration on every call before showing.
        mToast.setDuration(lastTime);
        mToast.setText(stringid);
        mToast.show();
    }

    private static void showToast(final String tips, final int lastTime) {
        if (mToast == null) {
            mToast = Toast.makeText(app, tips, lastTime);
        }
        // Same duration fix as above.
        mToast.setDuration(lastTime);
        mToast.setText(tips);
        mToast.show();
    }
}
| {
"content_hash": "fa68fd71229f11109b1bf2db7a4d8fc7",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 75,
"avg_line_length": 20.482758620689655,
"alnum_prop": 0.5816498316498316,
"repo_name": "cowthan/AyoCompoment",
"id": "54e34b86a94d3b8762eee9a6c64503eff8e20a98",
"size": "1218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ayo-menu-lib/src/main/java/org/ayo/sample/menu/notify/ToasterDebug.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "737806"
}
],
"symlink_target": ""
} |
%% Copyright (C) 2009-2010, Gostai S.A.S.
%%
%% This software is provided "as is" without warranty of any kind,
%% either expressed or implied, including but not limited to the
%% implied warranties of fitness for a particular purpose.
%%
%% See the LICENSE file for more information.
\section{Pattern}
\lstinline|Pattern| class is used to make correspondences between a pattern
and another \lstinline|Object|. The visit is done either on the pattern or
on the element against which the pattern is compared.
\lstinline|Pattern|s are used for the implementation of the pattern matching.
So any class made compatible with the pattern matching implemented by this
class will allow you to use it implicitly in your scripts.
\begin{urbiscript}[firstnumber=1]
[1, var a, var b] = [1, 2, 3];
[00000000] [1, 2, 3]
a;
[00000000] 2
b;
[00000000] 3
\end{urbiscript}
\subsection{Prototypes}
\begin{refObjects}
\item[Object]
\end{refObjects}
\subsection{Construction}
A \lstinline|Pattern| can be created with any object that can be matched.
\begin{urbiscript}
Pattern.new([1]); // create a pattern to match the list [1].
[00000000] Pattern_0x189ea80
Pattern.new(Pattern.Binding.new("a")); // match anything into "a".
[00000000] Pattern_0x18d98b0
\end{urbiscript}
\subsection{Slots}
\begin{urbiscriptapi}
\item[Binding]
A class used to create pattern variables.
\begin{urbiscript}
Pattern.Binding.new("a");
[00000000] var a
\end{urbiscript}
\item[bindings]
A \lstinline|Dictionary| filled by the match function for each
\refSlot{Binding} contained inside the pattern.
\begin{urbiscript}
{
var p = Pattern.new([Pattern.Binding.new("a"), Pattern.Binding.new("b")]);
assert (p.match([1, 2]));
p.bindings
};
[00000000] ["a" => 1, "b" => 2]
\end{urbiscript}
\item[match](<value>)%
Use \var{value} to unify the current pattern with this value.
Return the status of the match.
\begin{itemize}
\item[matchPattern](<pattern>, <value>)%
This function is used as a callback function to store all bindings
in the same place. This function is useful inside objects that
implement a \lstinline|match| or \lstinline|matchAgainst| function
that need to continue the match deeper. Return the status of the
match (a Boolean).
    The \var{pattern} should provide a method
    \lstinline|match(\var{handler},\var{value})|; otherwise the value's method
    \lstinline|matchAgainst(\var{handler}, \var{pattern})| is used. If neither
    is provided, the \lstinline|'=='| operator is used.
To see how to use it, you can have a look at the implementation of
\refSlot[List]{matchAgainst}.
%% This function is indirectly tested with the match of Pattern.Binding
%% inside lists.
\item[pattern]
The pattern given at the creation.
\begin{urbiassert}
Pattern.new(1).pattern == 1;
Pattern.new([1, 2]).pattern == [1, 2];
{
var pattern = [1, Pattern.Binding.new("a")];
Pattern.new(pattern).pattern === pattern
};
\end{urbiassert}
\item If the match is correct, then the \var{bindings} member will
    contain the result of every matched value.
\item If the match is incorrect, then the \var{bindings} member should
not be used.
\end{itemize}
If the pattern contains multiple \refSlot{Binding} with the same name,
then the behavior is undefined.
\begin{urbiassert}
Pattern.new(1).match(1);
Pattern.new([1, 2]).match([1, 2]);
! Pattern.new([1, 2]).match([1, 3]);
! Pattern.new([1, 2]).match([1, 2, 3]);
Pattern.new(Pattern.Binding.new("a")).match(0);
Pattern.new([1, Pattern.Binding.new("a")]).match([1, 2]);
! Pattern.new([1, Pattern.Binding.new("a")]).match(0);
\end{urbiassert}
\end{urbiscriptapi}
%%% Local Variables:
%%% coding: utf-8
%%% mode: latex
%%% TeX-master: "../urbi-sdk"
%%% ispell-dictionary: "american"
%%% ispell-personal-dictionary: "../urbi.dict"
%%% fill-column: 76
%%% End:
| {
"content_hash": "440d29282fbc9fb9030f3a522424e6db",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 77,
"avg_line_length": 27.94160583941606,
"alnum_prop": 0.7123824451410659,
"repo_name": "aldebaran/urbi",
"id": "d7241138c835680a81a86c264fc84f57389a6a56",
"size": "3828",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "doc/specs/pattern.tex",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Bison",
"bytes": "35375"
},
{
"name": "C",
"bytes": "2723"
},
{
"name": "C++",
"bytes": "2835679"
},
{
"name": "CSS",
"bytes": "24903"
},
{
"name": "Emacs Lisp",
"bytes": "14099"
},
{
"name": "Java",
"bytes": "448334"
},
{
"name": "JavaScript",
"bytes": "128902"
},
{
"name": "Makefile",
"bytes": "89088"
},
{
"name": "PHP",
"bytes": "7299"
},
{
"name": "Perl",
"bytes": "139386"
},
{
"name": "Prolog",
"bytes": "10897"
},
{
"name": "Python",
"bytes": "144405"
},
{
"name": "Shell",
"bytes": "158061"
},
{
"name": "TeX",
"bytes": "3863860"
}
],
"symlink_target": ""
} |
// Minimal image-processing interface: parsed header metadata plus raw pixel
// and histogram buffers. The field names (rows/columns/bands/max value)
// suggest a PGM/PPM-style image format -- TODO confirm against the
// implementation of readImage(), which is not in this header.
class ImageProcessing{
  public:
    //variables ... image headers
    int numberOfColumns, numberOfRows, numberOfBands, highVal, totalPixels, header, highHisto;
    // Raw buffers. No constructor/destructor is declared, so allocation and
    // release are presumably handled inside the member functions -- verify
    // ownership before adding new call sites.
    unsigned char * image, * histogram, * outimageHistogram;
    //functions
    void readImage(char **argv, int k);           // presumably loads the image file named in argv[k]
    void writeImage(char **argv, int k);          // presumably writes output to the file named in argv[k]
    void calculateHistogram(char **argv, int k);  // presumably fills 'histogram'/'outimageHistogram' from 'image'
};
#endif | {
"content_hash": "d37ce6de5b0b37afb0a3f104f43855c6",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 91,
"avg_line_length": 27.53846153846154,
"alnum_prop": 0.7402234636871509,
"repo_name": "jelouodsa/Image-Processing",
"id": "a08a01e468ce5737cc5c28cc7647e94d7e863415",
"size": "415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/imageprocessing.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "5261"
},
{
"name": "Shell",
"bytes": "160"
}
],
"symlink_target": ""
} |
require "spec_helper"
require "chapters/chapter"
module Viz3k
  # Specs for Chapter#appearances: a chapter asks each of its pages whether
  # a given person appears on it, and collects the matching page numbers.
  describe "Chapter" do
    before(:each) do
      # Two test doubles standing in for page objects; only the #page and
      # #has_person messages are exercised by these examples.
      @page1 = double()
      @page2 = double()
      allow(@page1).to receive(:page) { 1 }
      allow(@page2).to receive(:page) { 2 }
      pages = [@page1, @page2]
      @chapter = Chapter.new(1, "Chapter 1", pages)
    end
    describe "#appearances" do
      it "returns every page the specified person appears on" do
        person_id = 17
        # Stub has_person to answer true, then layer message expectations on
        # top to also verify the argument. NOTE(review): this relies on
        # rspec-mocks falling back to the stubbed implementation's return
        # value when the expectation supplies none -- confirm against the
        # rspec-mocks documentation if these specs are upgraded.
        allow(@page1).to receive(:has_person) { true }
        allow(@page2).to receive(:has_person) { true }
        expect(@page1).to receive(:has_person).with(person_id)
        expect(@page2).to receive(:has_person).with(person_id)
        expect(@chapter.appearances(person_id)).to eq [1,2]
      end
      it "returns an empty array if the person never appears" do
        person_id = 17
        # Same stubbing pattern, but every page denies knowing the person.
        allow(@page1).to receive(:has_person) { false }
        allow(@page2).to receive(:has_person) { false }
        expect(@page1).to receive(:has_person).with(person_id)
        expect(@page2).to receive(:has_person).with(person_id)
        expect(@chapter.appearances(person_id)).to eq []
      end
    end
  end
end
| {
"content_hash": "d64e07180c7284469d9aac2cdc6a314c",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 64,
"avg_line_length": 33.628571428571426,
"alnum_prop": 0.6074766355140186,
"repo_name": "sbai/viz3k",
"id": "b70211738e79ca64fab71f3d10b3cb8054ae9289",
"size": "1196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/viz3k/chapters/chapter_spec.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "6673"
},
{
"name": "HTML",
"bytes": "26669"
},
{
"name": "JavaScript",
"bytes": "1063184"
},
{
"name": "Ruby",
"bytes": "61413"
}
],
"symlink_target": ""
} |
# Disassembly of com.google.common.collect.Multisets$MultisetIteratorImpl
# (see .source below): an element iterator for a Multiset that is driven by
# an iterator over the multiset's entries, yielding each entry's element
# once per occurrence.
.class final Lcom/google/common/collect/Multisets$MultisetIteratorImpl;
.super Ljava/lang/Object;
.source "Multisets.java"

# interfaces
.implements Ljava/util/Iterator;

# annotations
.annotation system Ldalvik/annotation/EnclosingClass;
    value = Lcom/google/common/collect/Multisets;
.end annotation

.annotation system Ldalvik/annotation/InnerClass;
    accessFlags = 0x18
    name = "MultisetIteratorImpl"
.end annotation

.annotation system Ldalvik/annotation/Signature;
    value = {
        "<E:",
        "Ljava/lang/Object;",
        ">",
        "Ljava/lang/Object;",
        "Ljava/util/Iterator",
        "<TE;>;"
    }
.end annotation

# instance fields
# Set true by next() and consumed by remove(); guards against remove()
# without a preceding next().
.field private canRemove:Z

# The entry currently being iterated; its element is returned until
# laterCount reaches zero.
.field private currentEntry:Lcom/google/common/collect/Multiset$Entry;
    .annotation system Ldalvik/annotation/Signature;
        value = {
            "Lcom/google/common/collect/Multiset$Entry",
            "<TE;>;"
        }
    .end annotation
.end field

# Underlying iterator over the multiset's distinct entries.
.field private final entryIterator:Ljava/util/Iterator;
    .annotation system Ldalvik/annotation/Signature;
        value = {
            "Ljava/util/Iterator",
            "<",
            "Lcom/google/common/collect/Multiset$Entry",
            "<TE;>;>;"
        }
    .end annotation
.end field

# Remaining occurrences of currentEntry's element still to be returned.
.field private laterCount:I

# The multiset being iterated (used by remove(), truncated below).
.field private final multiset:Lcom/google/common/collect/Multiset;
    .annotation system Ldalvik/annotation/Signature;
        value = {
            "Lcom/google/common/collect/Multiset",
            "<TE;>;"
        }
    .end annotation
.end field

# Total occurrence count of currentEntry, captured when the entry is loaded.
.field private totalCount:I
# direct methods
# Constructor: stores the backing multiset and its entry iterator; all
# counters start at their zero defaults.
.method constructor <init>(Lcom/google/common/collect/Multiset;Ljava/util/Iterator;)V
    .locals 0
    .annotation system Ldalvik/annotation/Signature;
        value = {
            "(",
            "Lcom/google/common/collect/Multiset",
            "<TE;>;",
            "Ljava/util/Iterator",
            "<",
            "Lcom/google/common/collect/Multiset$Entry",
            "<TE;>;>;)V"
        }
    .end annotation

    .prologue
    .line 654
    .local p0, "this":Lcom/google/common/collect/Multisets$MultisetIteratorImpl;, "Lcom/google/common/collect/Multisets$MultisetIteratorImpl<TE;>;"
    .local p1, "multiset":Lcom/google/common/collect/Multiset;, "Lcom/google/common/collect/Multiset<TE;>;"
    .local p2, "entryIterator":Ljava/util/Iterator;, "Ljava/util/Iterator<Lcom/google/common/collect/Multiset$Entry<TE;>;>;"
    invoke-direct {p0}, Ljava/lang/Object;-><init>()V

    .line 655
    iput-object p1, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->multiset:Lcom/google/common/collect/Multiset;

    .line 656
    iput-object p2, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->entryIterator:Ljava/util/Iterator;

    .line 657
    return-void
.end method
# virtual methods
# hasNext(): true if occurrences of the current entry remain
# (laterCount > 0) or the underlying entry iterator has more entries.
.method public hasNext()Z
    .locals 1

    .prologue
    .line 661
    .local p0, "this":Lcom/google/common/collect/Multisets$MultisetIteratorImpl;, "Lcom/google/common/collect/Multisets$MultisetIteratorImpl<TE;>;"
    iget v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->laterCount:I

    # laterCount > 0 -> more copies of the current element pending.
    if-gtz v0, :cond_0

    iget-object v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->entryIterator:Ljava/util/Iterator;

    invoke-interface {v0}, Ljava/util/Iterator;->hasNext()Z

    move-result v0

    if-eqz v0, :cond_1

    :cond_0
    const/4 v0, 0x1

    :goto_0
    return v0

    :cond_1
    const/4 v0, 0x0

    goto :goto_0
.end method
# next(): when the current entry is exhausted (laterCount == 0), advance the
# entry iterator and reload laterCount/totalCount from the new entry's
# count; then consume one occurrence and return the entry's element.
.method public next()Ljava/lang/Object;
    .locals 1
    .annotation system Ldalvik/annotation/Signature;
        value = {
            "()TE;"
        }
    .end annotation

    .prologue
    .line 666
    .local p0, "this":Lcom/google/common/collect/Multisets$MultisetIteratorImpl;, "Lcom/google/common/collect/Multisets$MultisetIteratorImpl<TE;>;"
    invoke-virtual {p0}, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->hasNext()Z

    move-result v0

    if-nez v0, :cond_0

    .line 667
    # Iterator contract: exhausted iterators throw NoSuchElementException.
    new-instance v0, Ljava/util/NoSuchElementException;

    invoke-direct {v0}, Ljava/util/NoSuchElementException;-><init>()V

    throw v0

    .line 669
    :cond_0
    iget v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->laterCount:I

    if-nez v0, :cond_1

    .line 670
    # Current entry exhausted: load the next entry from entryIterator.
    iget-object v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->entryIterator:Ljava/util/Iterator;

    invoke-interface {v0}, Ljava/util/Iterator;->next()Ljava/lang/Object;

    move-result-object v0

    check-cast v0, Lcom/google/common/collect/Multiset$Entry;

    iput-object v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->currentEntry:Lcom/google/common/collect/Multiset$Entry;

    .line 671
    # laterCount = totalCount = currentEntry.getCount()
    iget-object v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->currentEntry:Lcom/google/common/collect/Multiset$Entry;

    invoke-interface {v0}, Lcom/google/common/collect/Multiset$Entry;->getCount()I

    move-result v0

    iput v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->laterCount:I

    iput v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->totalCount:I

    .line 673
    :cond_1
    # Consume one occurrence and permit remove() until the next call.
    iget v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->laterCount:I

    add-int/lit8 v0, v0, -0x1

    iput v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->laterCount:I

    .line 674
    const/4 v0, 0x1

    iput-boolean v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->canRemove:Z

    .line 675
    iget-object v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->currentEntry:Lcom/google/common/collect/Multiset$Entry;

    invoke-interface {v0}, Lcom/google/common/collect/Multiset$Entry;->getElement()Ljava/lang/Object;

    move-result-object v0

    return-object v0
.end method
# Disassembled Dalvik bytecode of Multisets$MultisetIteratorImpl.remove().
# Removes the element returned by the last next(). Precondition (checked via
# Preconditions.checkState): canRemove is set, i.e. next() was called since
# the last remove(). If only one occurrence of the current element remains in
# the multiset (totalCount == 1), the whole entry is dropped through the
# entry iterator; otherwise a single occurrence is removed via
# Multiset.remove(element). Finally totalCount-- and canRemove is cleared.
.method public remove()V
.locals 2
.prologue
.line 680
.local p0, "this":Lcom/google/common/collect/Multisets$MultisetIteratorImpl;, "Lcom/google/common/collect/Multisets$MultisetIteratorImpl<TE;>;"
# checkState(canRemove, "no calls to next() since the last call to remove()")
iget-boolean v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->canRemove:Z
const-string v1, "no calls to next() since the last call to remove()"
invoke-static {v0, v1}, Lcom/google/common/base/Preconditions;->checkState(ZLjava/lang/Object;)V
.line 682
# Last occurrence of this element -> remove the entire entry.
iget v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->totalCount:I
const/4 v1, 0x1
if-ne v0, v1, :cond_0
.line 683
iget-object v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->entryIterator:Ljava/util/Iterator;
invoke-interface {v0}, Ljava/util/Iterator;->remove()V
.line 687
# Common epilogue: totalCount--; canRemove = false.
:goto_0
iget v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->totalCount:I
add-int/lit8 v0, v0, -0x1
iput v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->totalCount:I
.line 688
const/4 v0, 0x0
iput-boolean v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->canRemove:Z
.line 689
return-void
.line 685
# More occurrences remain -> remove just one via Multiset.remove(element).
:cond_0
iget-object v0, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->multiset:Lcom/google/common/collect/Multiset;
iget-object v1, p0, Lcom/google/common/collect/Multisets$MultisetIteratorImpl;->currentEntry:Lcom/google/common/collect/Multiset$Entry;
invoke-interface {v1}, Lcom/google/common/collect/Multiset$Entry;->getElement()Ljava/lang/Object;
move-result-object v1
invoke-interface {v0, v1}, Lcom/google/common/collect/Multiset;->remove(Ljava/lang/Object;)Z
goto :goto_0
.end method
| {
"content_hash": "2982e84df2d1608635741d9189c2ee9f",
"timestamp": "",
"source": "github",
"line_count": 260,
"max_line_length": 147,
"avg_line_length": 30.46153846153846,
"alnum_prop": 0.6746212121212121,
"repo_name": "x5y/SparkNZ-Xposed",
"id": "c5eaad677d6ba059d0c30c5af62f71bf54b40762",
"size": "7920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TelecomApp_Decompiled/smali/com/google/common/collect/Multisets$MultisetIteratorImpl.smali",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "2972"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<!-- Redirect stub (presumably rustdoc-generated - TODO confirm): forwards
     this legacy URL to macro.include_bytes.html. The meta refresh is the
     no-JS fallback; the script below also preserves the query string and
     fragment when JavaScript is available. -->
<meta http-equiv="refresh" content="0;URL=macro.include_bytes.html">
</head>
<body>
<p>Redirecting to <a href="macro.include_bytes.html">macro.include_bytes.html</a>...</p>
<script>location.replace("macro.include_bytes.html" + location.search + location.hash);</script>
</body>
</html> | {
"content_hash": "bbc53ee88fd13dd367c334445cd941d2",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 100,
"avg_line_length": 33.7,
"alnum_prop": 0.6824925816023739,
"repo_name": "sbeckeriv/warc_nom_parser",
"id": "281a507068dcbf7c36efc90feff77287d2bf34a1",
"size": "337",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/doc/nom/lib/std/prelude/v1/v1/macro.include_bytes!.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Rust",
"bytes": "8117"
}
],
"symlink_target": ""
} |
package org.springframework.cloud.gcp.cloudfoundry;
import java.io.IOException;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import io.pivotal.cfenv.test.AbstractCfEnvTests;
import org.junit.Before;
import org.junit.Test;
import org.springframework.boot.json.JsonParser;
import org.springframework.boot.json.JsonParserFactory;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.support.TestPropertySourceUtils;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for the Cloud Foundry environment post-processor.
 *
 * <p>Each test loads a VCAP_SERVICES fixture from the classpath, mocks it into
 * the CF environment, runs the post-processor, and asserts the Spring
 * properties it derives from the bound GCP services.
 *
 * @author João André Martins
 * @author Chengyuan Zhao
 * @author Eddú Meléndez
 */
public class GcpCloudFoundryEnvironmentPostProcessorTests extends AbstractCfEnvTests {

    private GcpCloudFoundryEnvironmentPostProcessor initializer = new GcpCloudFoundryEnvironmentPostProcessor();

    private final ConfigurableApplicationContext context = new AnnotationConfigApplicationContext();

    /**
     * Pre-populates the environment with user-set Cloud SQL properties; these
     * must survive post-processing when the SQL binding is ambiguous (see
     * {@link #test2Sqls()}).
     */
    @Before
    public void setup() {
        TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
                "spring.cloud.gcp.sql.instance-connection-name=test-connection",
                "spring.cloud.gcp.sql.database-name=test-dbname",
                "spring.cloud.gcp.sql.credentials.encoded-key=test-key");
    }

    /**
     * Happy path: one binding per service type; every supported GCP service
     * property is derived from the VCAP_SERVICES payload.
     */
    @Test
    public void testConfigurationProperties() throws IOException {
        String vcapFileContents = new String(Files.readAllBytes(
                new ClassPathResource("VCAP_SERVICES").getFile().toPath()));
        mockVcapServices(vcapFileContents);
        this.initializer.postProcessEnvironment(this.context.getEnvironment(), null);

        assertThat(getProperty("spring.cloud.gcp.pubsub.project-id")).isEqualTo("graphite-test-spring-cloud-gcp");
        assertThat(getProperty("spring.cloud.gcp.pubsub.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-pubsub"));

        assertThat(getProperty("spring.cloud.gcp.storage.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-storage"));

        assertThat(getProperty("spring.cloud.gcp.spanner.project-id"))
                .isEqualTo("graphite-test-spring-cloud-gcp");
        assertThat(getProperty("spring.cloud.gcp.spanner.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-spanner"));
        assertThat(getProperty("spring.cloud.gcp.spanner.instance-id"))
                .isEqualTo("pcf-sb-7-1521579042901037743");

        assertThat(getProperty("spring.cloud.gcp.datastore.project-id"))
                .isEqualTo("graphite-test-spring-cloud-gcp");
        assertThat(getProperty("spring.cloud.gcp.datastore.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-datastore"));

        assertThat(getProperty("spring.cloud.gcp.firestore.project-id"))
                .isEqualTo("pcf-dev-01-17031");
        assertThat(getProperty("spring.cloud.gcp.firestore.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-firestore"));

        assertThat(getProperty("spring.cloud.gcp.bigquery.project-id"))
                .isEqualTo("pcf-dev-01-17031");
        assertThat(getProperty("spring.cloud.gcp.bigquery.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-bigquery"));
        assertThat(getProperty("spring.cloud.gcp.bigquery.dataset-name")).isEqualTo("test_dataset");

        assertThat(getProperty("spring.cloud.gcp.trace.project-id"))
                .isEqualTo("graphite-test-spring-cloud-gcp");
        assertThat(getProperty("spring.cloud.gcp.trace.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-stackdriver-trace"));

        assertThat(getProperty("spring.cloud.gcp.sql.credentials.encoded-key"))
                .isEqualTo(getPrivateKeyDataFromJson(vcapFileContents, "google-cloudsql-postgres"));
        assertThat(getProperty("spring.cloud.gcp.sql.instance-connection-name"))
                .isEqualTo("graphite-test-spring-cloud-gcp:us-central1:pcf-sb-3-1521233681947276465");
        assertThat(getProperty("spring.cloud.gcp.sql.database-name"))
                .isEqualTo("pcf-sb-4-1521234236513507790");
        assertThat(getProperty("spring.datasource.username"))
                .isEqualTo("fa6bb781-c76d-42");
        assertThat(getProperty("spring.datasource.password"))
                .isEqualTo("IxEQ63FRxSUSgoDWKbqEHmhY6D9h4nro1fja8lnP48s=");
    }

    /**
     * With both a MySQL and a PostgreSQL service bound, the SQL configuration
     * is ambiguous; the post-processor must leave the user-provided SQL
     * properties from {@link #setup()} untouched.
     */
    @Test
    public void test2Sqls() throws IOException {
        String vcapFileContents = new String(Files.readAllBytes(
                new ClassPathResource("VCAP_SERVICES_2_SQL").getFile().toPath()));
        mockVcapServices(vcapFileContents);

        // BUG FIX: the post-processor was never invoked here, so the assertions
        // below trivially held against the values set in setup() and the test
        // verified nothing. Running it checks that an ambiguous (double) SQL
        // binding does not override the user-provided properties.
        this.initializer.postProcessEnvironment(this.context.getEnvironment(), null);

        assertThat(getProperty("spring.cloud.gcp.sql.database-name"))
                .isEqualTo("test-dbname");
        assertThat(getProperty("spring.cloud.gcp.sql.instance-connection-name"))
                .isEqualTo("test-connection");
        assertThat(getProperty("spring.cloud.gcp.sql.credentials.encoded-key"))
                .isEqualTo("test-key");
    }

    /**
     * User-provided (CUPS) services are honored just like brokered ones.
     */
    @Test
    public void testUserProvidedServices() throws IOException {
        String vcapFileContents = new String(Files.readAllBytes(
                new ClassPathResource("VCAP_SERVICES_USER_PROVIDED").getFile().toPath()));
        mockVcapServices(vcapFileContents);
        this.initializer.postProcessEnvironment(this.context.getEnvironment(), null);

        assertThat(getProperty("spring.cloud.gcp.spanner.project-id"))
                .isEqualTo("spanner-project-id");
        assertThat(getProperty("spring.cloud.gcp.spanner.instance-id"))
                .isEqualTo("spanner-instance");
    }

    /**
     * Extracts the base64-encoded {@code PrivateKeyData} credential of the
     * first bound instance of the given service from the raw VCAP JSON.
     */
    @SuppressWarnings("unchecked")
    private String getPrivateKeyDataFromJson(String json, String serviceName) {
        JsonParser parser = JsonParserFactory.getJsonParser();
        Map<String, Object> vcapMap = parser.parseMap(json);
        return ((Map<String, String>) ((Map<String, Object>) ((List<Object>) vcapMap.get(serviceName)).get(0))
                .get("credentials")).get("PrivateKeyData");
    }

    private String getProperty(String key) {
        return this.context.getEnvironment().getProperty(key);
    }
}
| {
"content_hash": "e192222b62193d614ce6457bbfe997d8",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 109,
"avg_line_length": 42.60431654676259,
"alnum_prop": 0.7825059101654847,
"repo_name": "spring-cloud/spring-cloud-gcp",
"id": "5d18c4d741af87df3fe4d110f3db641c21972fbb",
"size": "6547",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "spring-cloud-gcp-cloudfoundry/src/test/java/org/springframework/cloud/gcp/cloudfoundry/GcpCloudFoundryEnvironmentPostProcessorTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1504"
},
{
"name": "FreeMarker",
"bytes": "305"
},
{
"name": "HTML",
"bytes": "27677"
},
{
"name": "Java",
"bytes": "2644899"
},
{
"name": "Kotlin",
"bytes": "11399"
},
{
"name": "Shell",
"bytes": "1361"
},
{
"name": "TSQL",
"bytes": "330"
}
],
"symlink_target": ""
} |
package org.scify.jedai.schemaclustering;
import gnu.trove.iterator.TIntIterator;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.jena.atlas.json.JsonArray;
import org.apache.jena.atlas.json.JsonObject;
import org.scify.jedai.configuration.gridsearch.IntGridSearchConfiguration;
import org.scify.jedai.datamodel.Attribute;
import org.scify.jedai.datamodel.EntityProfile;
import org.scify.jedai.datamodel.RepModelSimMetricCombo;
import org.scify.jedai.configuration.randomsearch.IntRandomSearchConfiguration;
import org.scify.jedai.datamodel.AttributeClusters;
import org.scify.jedai.textmodels.ITextModel;
import org.scify.jedai.utilities.enumerations.RepresentationModel;
import org.scify.jedai.utilities.enumerations.SimilarityMetric;
import org.scify.jedai.utilities.graph.ConnectedComponents;
import org.scify.jedai.utilities.graph.UndirectedGraph;
/**
 * Base implementation of attribute (schema) clustering. Every attribute of the
 * input profiles is summarized by a textual model ({@link ITextModel}); models
 * are compared through an inverted index of their signatures; two attributes
 * are linked when their similarity exceeds the portion {@code a} of either
 * attribute's maximum observed similarity; the connected components of the
 * resulting graph form the attribute clusters.
 *
 * <p>Supports both Dirty ER (a single profile list) and Clean-Clean ER (two
 * lists). For Clean-Clean ER all attributes share one global id space:
 * [0, attributesDelimiter) for dataset 1 and [attributesDelimiter,
 * noOfAttributes) for dataset 2, while {@code attributeModels[d]} is indexed
 * by dataset-local, 0-based ids. DATASET_1/DATASET_2 are constants inherited
 * through {@link ISchemaClustering} (presumably 0 and 1 — defined elsewhere).
 *
 * @author G.A.P. II
 */
public abstract class AbstractAttributeClustering implements ISchemaClustering {

    // Global id of the first dataset-2 attribute; negative (-1) for Dirty ER.
    protected int attributesDelimiter;
    // Total number of attributes across both datasets.
    protected int noOfAttributes;
    protected float a; // minimum portion of max similarity for connecting two attributes
    // Per global attribute id: maximum similarity observed during compareAttributes().
    protected float[] globalMaxSimilarities;

    protected final IntGridSearchConfiguration gridCombo;
    protected final IntRandomSearchConfiguration randomCombo;
    // attributeModels[datasetId][localAttributeId] -> textual model of that attribute.
    protected ITextModel[][] attributeModels;
    protected final List<RepModelSimMetricCombo> modelMetricCombinations;
    // signature -> local ids of the indexed dataset's attributes carrying it.
    protected Map<String, TIntList> invertedIndex;
    protected RepresentationModel repModel;
    protected SimilarityMetric simMetric;
    // Temporary attribute-name -> (1-based) id map; cleared after model building.
    protected TObjectIntMap<String> attrNameIndex;

    /**
     * @param a      minimum portion of the max similarity for linking two attributes
     * @param model  representation model used to summarize each attribute
     * @param metric similarity metric applied to pairs of models
     */
    public AbstractAttributeClustering(float a, RepresentationModel model, SimilarityMetric metric) {
        this.a = a;
        repModel = model;
        simMetric = metric;
        attributeModels = new ITextModel[2][];
        modelMetricCombinations = RepModelSimMetricCombo.getAllValidCombos();
        gridCombo = new IntGridSearchConfiguration(modelMetricCombinations.size() - 1, 0, 1);
        randomCombo = new IntRandomSearchConfiguration(modelMetricCombinations.size(), 0);
    }

    /**
     * Builds one text model per distinct attribute name of the given dataset,
     * feeds every attribute value into its model and finalizes the models.
     * Resets {@link #attrNameIndex}, so ids are dataset-local (1-based in the
     * map, 0-based in {@code attributeModels[datasetId]}).
     */
    private void buildAttributeModels(int datasetId, List<EntityProfile> profiles) {
        attrNameIndex = new TObjectIntHashMap<>();
        profiles.forEach((profile) -> {
            profile.getAttributes().forEach((attribute) -> {
                // 1-based ids: putIfAbsent only assigns on first sight of a name.
                attrNameIndex.putIfAbsent(attribute.getName(), attrNameIndex.size() + 1);
            });
        });

        int currentAttributes = attrNameIndex.size();
        attributeModels[datasetId] = new ITextModel[currentAttributes];
        final TObjectIntIterator<String> it = attrNameIndex.iterator();
        while (it.hasNext()) {
            it.advance();
            attributeModels[datasetId][it.value() - 1] = RepresentationModel.getModel(datasetId, repModel, simMetric, it.key());
        }

        profiles.forEach((profile) -> {
            profile.getAttributes().forEach((attribute) -> {
                updateModel(datasetId, attribute);
            });
        });

        for (int i = 0; i < currentAttributes; i++) {
            attributeModels[datasetId][i].finalizeModel();
        }
    }

    /**
     * Indexes the signatures of the comparison-target dataset (dataset 2 for
     * Clean-Clean ER, dataset 1 for Dirty ER) so that only attributes sharing
     * at least one signature are ever compared.
     */
    private void buildInvertedIndex() {
        invertedIndex = new HashMap<>();
        int indexedDataset = 0 < attributesDelimiter ? DATASET_2 : DATASET_1;
        for (int i = 0; i < attributeModels[indexedDataset].length; i++) {
            final Set<String> signatures = attributeModels[indexedDataset][i].getSignatures();
            for (String signature : signatures) {
                TIntList attributeIds = invertedIndex.get(signature);
                if (attributeIds == null) {
                    attributeIds = new TIntArrayList();
                    invertedIndex.put(signature, attributeIds);
                }
                attributeIds.add(i);
            }
        }
    }

    /**
     * Second pass over all candidate pairs: links attributes whose similarity
     * exceeds {@code a} times either endpoint's global maximum (computed by
     * {@link #compareAttributes()}), then turns the connected components of
     * the link graph into {@link AttributeClusters} (one result per dataset).
     */
    protected AttributeClusters[] clusterAttributes() {
        final UndirectedGraph similarityGraph = new UndirectedGraph(noOfAttributes);

        final TIntSet coOccurringAttrs = new TIntHashSet();
        int lastId = 0 < attributesDelimiter ? attributesDelimiter : noOfAttributes;
        for (int i = 0; i < lastId; i++) {
            coOccurringAttrs.clear();

            // Candidate neighbors: attributes sharing at least one signature.
            final Set<String> signatures = attributeModels[DATASET_1][i].getSignatures();
            for (String signature : signatures) {
                final TIntList attrIds = invertedIndex.get(signature);
                if (attrIds == null) {
                    continue;
                }
                coOccurringAttrs.addAll(attrIds);
            }

            if (0 < attributesDelimiter) { // Clean-Clean ER
                connectCleanCleanErComparisons(i, coOccurringAttrs, similarityGraph);
            } else { // Dirty ER
                connectDirtyErComparisons(i, coOccurringAttrs, similarityGraph);
            }
        }

        AttributeClusters[] aClusters;
        final ConnectedComponents cc = new ConnectedComponents(similarityGraph);
        if (attributesDelimiter < 0) { // Dirty ER
            aClusters = new AttributeClusters[1];
            aClusters[0] = clusterAttributes(DATASET_1, cc);
        } else { // Clean-Clean ER
            aClusters = new AttributeClusters[2];
            aClusters[0] = clusterAttributes(DATASET_1, cc);
            aClusters[1] = clusterAttributes(DATASET_2, cc);
        }
        return aClusters;
    }

    /**
     * Maps every attribute of the given dataset to its connected component.
     * Singleton attributes are merged into a single extra "glue" cluster, and
     * each cluster is assigned the mean entropy of its member models.
     */
    protected AttributeClusters clusterAttributes(int datasetId, ConnectedComponents cc) {
        int firstId = datasetId == DATASET_1 ? 0 : attributesDelimiter;
        int lastId = 0 < attributesDelimiter && datasetId == DATASET_1 ? attributesDelimiter : noOfAttributes;
        int glueClusterId = cc.count() + 1;
        int[] clusterFrequency = new int[glueClusterId + 1];
        float[] clusterEntropy = new float[glueClusterId + 1];
        final TObjectIntMap<String> clusters = new TObjectIntHashMap<>();
        for (int i = firstId; i < lastId; i++) {
            int ccId = cc.id(i);
            if (cc.size(i) == 1) { // singleton attribute
                ccId = glueClusterId;
            }

            clusterFrequency[ccId]++;
            // BUG FIX: i is a GLOBAL attribute id, but attributeModels[datasetId]
            // is indexed by dataset-local ids (see buildAttributeModels and
            // connectCleanCleanErComparisons). For DATASET_2, indexing with the
            // global id read the wrong model or threw ArrayIndexOutOfBounds;
            // i - firstId converts global -> local (no-op for DATASET_1).
            clusterEntropy[ccId] += attributeModels[datasetId][i - firstId].getEntropy(true);
            clusters.put(attributeModels[datasetId][i - firstId].getInstanceName(), ccId);
        }

        for (int i = 0; i < glueClusterId + 1; i++) {
            // Guard empty clusters (e.g. a glue cluster with no singletons):
            // 0f / 0 would yield NaN; leave their entropy at 0 instead.
            if (clusterFrequency[i] != 0) {
                clusterEntropy[i] /= clusterFrequency[i];
            }
        }
        return new AttributeClusters(clusterEntropy, clusters);
    }

    /**
     * First pass over all candidate pairs: records, per global attribute id,
     * the maximum similarity it reaches with any candidate neighbor. These
     * maxima calibrate the per-attribute linking threshold used later.
     */
    protected void compareAttributes() {
        globalMaxSimilarities = new float[noOfAttributes];
        final TIntSet coOccurringAttrs = new TIntHashSet();
        int lastId = 0 < attributesDelimiter ? attributesDelimiter : noOfAttributes;
        for (int i = 0; i < lastId; i++) {
            coOccurringAttrs.clear();

            final Set<String> signatures = attributeModels[DATASET_1][i].getSignatures();
            for (String signature : signatures) {
                final TIntList attrIds = invertedIndex.get(signature);
                if (attrIds == null) {
                    continue;
                }
                coOccurringAttrs.addAll(attrIds);
            }

            if (0 < attributesDelimiter) { // Clean-Clean ER
                executeCleanCleanErComparisons(i, coOccurringAttrs);
            } else { // Dirty ER
                executeDirtyErComparisons(i, coOccurringAttrs);
            }
        }
    }

    /**
     * Adds an edge for every dataset-1/dataset-2 candidate pair whose
     * similarity exceeds {@code a} times either endpoint's maximum. Note:
     * neighborId is dataset-2-LOCAL; + attributesDelimiter makes it global.
     */
    private void connectCleanCleanErComparisons(int attributeId, TIntSet coOccurringAttrs, UndirectedGraph similarityGraph) {
        for (TIntIterator sigIterator = coOccurringAttrs.iterator(); sigIterator.hasNext();) {
            int neighborId = sigIterator.next();
            int normalizedNeighborId = neighborId + attributesDelimiter;
            float similarity = attributeModels[DATASET_1][attributeId].getSimilarity(attributeModels[DATASET_2][neighborId]);
            if (a * globalMaxSimilarities[attributeId] < similarity
                    || a * globalMaxSimilarities[normalizedNeighborId] < similarity) {
                similarityGraph.addEdge(attributeId, normalizedNeighborId);
            }
        }
    }

    /**
     * Dirty-ER counterpart of the method above; neighborId <= attributeId is
     * skipped so every unordered pair is considered exactly once.
     */
    private void connectDirtyErComparisons(int attributeId, TIntSet coOccurringAttrs, UndirectedGraph similarityGraph) {
        for (TIntIterator sigIterator = coOccurringAttrs.iterator(); sigIterator.hasNext();) {
            int neighborId = sigIterator.next();
            if (neighborId <= attributeId) { // avoid repeated comparisons & comparison with attributeId
                continue;
            }

            float similarity = attributeModels[DATASET_1][attributeId].getSimilarity(attributeModels[DATASET_1][neighborId]);
            if (a * globalMaxSimilarities[attributeId] < similarity
                    || a * globalMaxSimilarities[neighborId] < similarity) {
                similarityGraph.addEdge(attributeId, neighborId);
            }
        }
    }

    /** Updates both endpoints' max similarity for each Clean-Clean candidate pair. */
    private void executeCleanCleanErComparisons(int attributeId, TIntSet coOccurringAttrs) {
        for (TIntIterator sigIterator = coOccurringAttrs.iterator(); sigIterator.hasNext();) {
            int neighborId = sigIterator.next();
            int normalizedNeighborId = neighborId + attributesDelimiter;
            float similarity = attributeModels[DATASET_1][attributeId].getSimilarity(attributeModels[DATASET_2][neighborId]);
            if (globalMaxSimilarities[attributeId] < similarity) {
                globalMaxSimilarities[attributeId] = similarity;
            }

            if (globalMaxSimilarities[normalizedNeighborId] < similarity) {
                globalMaxSimilarities[normalizedNeighborId] = similarity;
            }
        }
    }

    /** Updates both endpoints' max similarity for each Dirty-ER candidate pair. */
    private void executeDirtyErComparisons(int attributeId, TIntSet coOccurringAttrs) {
        for (TIntIterator sigIterator = coOccurringAttrs.iterator(); sigIterator.hasNext();) {
            int neighborId = sigIterator.next();
            if (neighborId <= attributeId) { // avoid repeated comparisons & comparison with attributeId
                continue;
            }

            float similarity = attributeModels[DATASET_1][attributeId].getSimilarity(attributeModels[DATASET_1][neighborId]);
            if (globalMaxSimilarities[attributeId] < similarity) {
                globalMaxSimilarities[attributeId] = similarity;
            }

            if (globalMaxSimilarities[neighborId] < similarity) {
                globalMaxSimilarities[neighborId] = similarity;
            }
        }
    }

    /** Dirty-ER entry point: clusters the attributes of a single profile list. */
    @Override
    public AttributeClusters[] getClusters(List<EntityProfile> profiles) {
        return this.getClusters(profiles, null);
    }

    /**
     * Main entry point. Builds the models (one or two datasets), the inverted
     * index and the per-attribute maxima, then clusters. Returns one
     * AttributeClusters per dataset (length 1 for Dirty ER, 2 for Clean-Clean).
     */
    @Override
    public AttributeClusters[] getClusters(List<EntityProfile> profilesD1, List<EntityProfile> profilesD2) {
        buildAttributeModels(DATASET_1, profilesD1);
        attributesDelimiter = -1;
        noOfAttributes = attrNameIndex.size();
        if (profilesD2 != null) {
            buildAttributeModels(DATASET_2, profilesD2);
            attributesDelimiter = noOfAttributes;
            noOfAttributes += attrNameIndex.size();
        }
        attrNameIndex = null; // no longer needed; free the memory

        buildInvertedIndex();
        compareAttributes();
        return clusterAttributes();
    }

    @Override
    public String getMethodConfiguration() {
        return getParameterName(0) + "=" + repModel + "\t"
                + getParameterName(1) + "=" + simMetric;
    }

    @Override
    public String getMethodParameters() {
        return getMethodName() + " involves two parameters:\n"
                + "1)" + getParameterDescription(0) + ".\n"
                + "2)" + getParameterDescription(1) + ".";
    }

    @Override
    public int getNumberOfGridConfigurations() {
        return gridCombo.getNumberOfConfigurations();
    }

    /** JSON description of the two tunable parameters (for UI/config tooling). */
    @Override
    public JsonArray getParameterConfiguration() {
        final JsonObject obj1 = new JsonObject();
        obj1.put("class", "org.scify.jedai.utilities.enumerations.RepresentationModel");
        obj1.put("name", getParameterName(0));
        obj1.put("defaultValue", "org.scify.jedai.utilities.enumerations.RepresentationModel.TOKEN_UNIGRAM_GRAPHS");
        obj1.put("minValue", "-");
        obj1.put("maxValue", "-");
        obj1.put("stepValue", "-");
        obj1.put("description", getParameterDescription(0));

        final JsonObject obj2 = new JsonObject();
        obj2.put("class", "org.scify.jedai.utilities.enumerations.SimilarityMetric");
        obj2.put("name", getParameterName(1));
        obj2.put("defaultValue", "org.scify.jedai.utilities.enumerations.SimilarityMetric.GRAPH_VALUE_SIMILARITY");
        obj2.put("minValue", "-");
        obj2.put("maxValue", "-");
        obj2.put("stepValue", "-");
        obj2.put("description", getParameterDescription(1));

        final JsonArray array = new JsonArray();
        array.add(obj1);
        array.add(obj2);
        return array;
    }

    @Override
    public String getParameterDescription(int parameterId) {
        switch (parameterId) {
            case 0:
                return "The " + getParameterName(0) + " aggregates the textual items that correspond to every attribute.";
            case 1:
                return "The " + getParameterName(1) + " compares the models of two attributes, returning a value between 0 (completely dissimlar) and 1 (identical).";
            default:
                return "invalid parameter id";
        }
    }

    @Override
    public String getParameterName(int parameterId) {
        switch (parameterId) {
            case 0:
                return "Representation Model";
            case 1:
                return "Similarity Measure";
            default:
                return "invalid parameter id";
        }
    }

    /** Draws a random (representation model, similarity metric) combination. */
    @Override
    public void setNextRandomConfiguration() {
        int comboId = (Integer) randomCombo.getNextRandomValue();
        final RepModelSimMetricCombo selectedCombo = modelMetricCombinations.get(comboId);
        repModel = selectedCombo.getRepModel();
        simMetric = selectedCombo.getSimMetric();
    }

    /** Selects the iterationNumber-th combination of a grid search. */
    @Override
    public void setNumberedGridConfiguration(int iterationNumber) {
        int comboId = (Integer) gridCombo.getNumberedValue(iterationNumber);
        final RepModelSimMetricCombo selectedCombo = modelMetricCombinations.get(comboId);
        repModel = selectedCombo.getRepModel();
        simMetric = selectedCombo.getSimMetric();
    }

    /** Selects the iterationNumber-th combination of a reproducible random search. */
    @Override
    public void setNumberedRandomConfiguration(int iterationNumber) {
        int comboId = (Integer) randomCombo.getNumberedRandom(iterationNumber);
        final RepModelSimMetricCombo selectedCombo = modelMetricCombinations.get(comboId);
        repModel = selectedCombo.getRepModel();
        simMetric = selectedCombo.getSimMetric();
    }

    /** Feeds one attribute's value(s) into the corresponding text model. */
    protected abstract void updateModel(int datasetId, Attribute attribute);
}
| {
"content_hash": "e8c4f07d4364d0c96d6ffeea7c5b9af2",
"timestamp": "",
"source": "github",
"line_count": 373,
"max_line_length": 166,
"avg_line_length": 42.042895442359246,
"alnum_prop": 0.6321897717127918,
"repo_name": "scify/JedAIToolkit",
"id": "890e4f1b35b80cb778f54137a69a88447d96af2a",
"size": "16323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/scify/jedai/schemaclustering/AbstractAttributeClustering.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1607839"
}
],
"symlink_target": ""
} |
Below are a few ways to make contributing to `array-mapcat` smoother.
## Issues
When opening an issue for a bug, please include steps for reproducing the problem. _If we can't reproduce it, we can't fix it_. If you are suggesting a new feature, please provide a clear and detailed explanation.
## Setup
git clone https://github.com/wilmoore/array-mapcat.js.git
cd array-mapcat.js
npm install
npm run dev
## Tests
If you are adding a new feature, please include tests. The test suite for this project uses [tape](https://github.com/substack/tape). To run the test suite, type `npm test`. You may also run the development watcher (`npm run dev`) which uses [nodemon](http://nodemon.io) to re-run the test suite when files are modified. You can also get the raw tap output by running `node test`.
## Style
To keep a consistent coding style in the project, we're using [JavaScript Standard Style](https://github.com/feross/standard).
```shell
npm run standard
```
> This command will be run automatically with `npm run dev`; however, you can run it on-demand as necessary.
## Dependencies
To ensure that (1) we are not depending on uninstalled packages and (2) we haven't installed any unused packages, we're using [dependency-check](https://www.npmjs.com/package/dependency-check).
```shell
npm run dependency-check
```
> This command will be run automatically with `npm run dev`; however, you can run it on-demand as necessary.
## Package
To keep a consistent `package.json`, we're using [`fixpack`](https://www.npmjs.com/package/fixpack).
```shell
npm run fixpack
```
## Commits
When submitting pull requests please add a [well-written and clear commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html) and squash your commits. This means being familiar with rebasing - if you are not, [this guide](https://help.github.com/articles/about-git-rebase/) should help you to get started. If you are still confused, feel free to ask for help.
| {
"content_hash": "f8299f79fa4455345a35bf8874347663",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 386,
"avg_line_length": 43.43478260869565,
"alnum_prop": 0.7467467467467468,
"repo_name": "wilmoore/array-mapcat.js",
"id": "144d77d78a8766f6bbe6149e3147756e91b2ad51",
"size": "2030",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contributing.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1320"
}
],
"symlink_target": ""
} |
favorite:17 / forked:9
Ambient Occlusion Bench Flash10 porting
Original version of AO bench was written by Syoyo Fujita.
http://lucille.atso-net.jp/aobench/
In the original Flash10 port, it ran 7 times slower than the Proce55ing version
(see http://lucille.atso-net.jp/blog/?p=638).
Now it appears to run at the same speed as the Proce55ing version.
----------------------------------------------------------------------
webpage; http://soundimpulse.sakura.ne.jp/ambient-occlusion-rendering/
![thumbnail](./thumbnail.jpg)
| {
"content_hash": "b100c3fecfa111b0f530112d8bf6c107",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 75,
"avg_line_length": 44.333333333333336,
"alnum_prop": 0.6522556390977443,
"repo_name": "keim/wonderfl",
"id": "86137a54896673c28332e9ed11337ab429968da8",
"size": "593",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projects/20090218_Ambient_Occlusion_Rendering/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "1667678"
},
{
"name": "AngelScript",
"bytes": "107061"
},
{
"name": "HTML",
"bytes": "58627"
},
{
"name": "Ruby",
"bytes": "7324"
}
],
"symlink_target": ""
} |
package com.alibaba.rocketmq.remoting.netty;
import com.alibaba.rocketmq.remoting.ChannelEventListener;
import com.alibaba.rocketmq.remoting.InvokeCallback;
import com.alibaba.rocketmq.remoting.RPCHook;
import com.alibaba.rocketmq.remoting.RemotingClient;
import com.alibaba.rocketmq.remoting.common.Pair;
import com.alibaba.rocketmq.remoting.common.RemotingHelper;
import com.alibaba.rocketmq.remoting.common.RemotingUtil;
import com.alibaba.rocketmq.remoting.exception.RemotingConnectException;
import com.alibaba.rocketmq.remoting.exception.RemotingSendRequestException;
import com.alibaba.rocketmq.remoting.exception.RemotingTimeoutException;
import com.alibaba.rocketmq.remoting.exception.RemotingTooMuchRequestException;
import com.alibaba.rocketmq.remoting.protocol.RemotingCommand;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
import io.netty.handler.timeout.IdleStateHandler;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.SocketAddress;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* @author shijia.wxr
*/
public class NettyRemotingClient extends NettyRemotingAbstract implements RemotingClient {
// Logger named by RemotingHelper.RemotingLogName (shared remoting log category).
private static final Logger log = LoggerFactory.getLogger(RemotingHelper.RemotingLogName);

// Max time (ms) to wait on the locks below before giving up.
private static final long LockTimeoutMillis = 3000;

private final NettyClientConfig nettyClientConfig;
private final Bootstrap bootstrap = new Bootstrap();
private final EventLoopGroup eventLoopGroupWorker;
// Guards mutation of channelTables.
private final Lock lockChannelTables = new ReentrantLock();
// Remote address -> cached channel wrapper.
private final ConcurrentHashMap<String /* addr */, ChannelWrapper> channelTables = new ConcurrentHashMap<String, ChannelWrapper>();

// Daemon timer (isDaemon = true) driving periodic housekeeping (see start()).
private final Timer timer = new Timer("ClientHouseKeepingService", true);

// Known name-server addresses and the one currently chosen.
private final AtomicReference<List<String>> namesrvAddrList = new AtomicReference<List<String>>();
private final AtomicReference<String> namesrvAddrChoosed = new AtomicReference<String>();
// Cursor into namesrvAddrList, randomly seeded via initValueIndex();
// presumably advanced round-robin when choosing a name server — TODO confirm
// against the selection logic elsewhere in this class.
private final AtomicInteger namesrvIndex = new AtomicInteger(initValueIndex());
private final Lock lockNamesrvChannel = new ReentrantLock();

// Fallback executor for callback processing.
private final ExecutorService publicExecutor;
private final ChannelEventListener channelEventListener;

private DefaultEventExecutorGroup defaultEventExecutorGroup;

private RPCHook rpcHook;
/**
 * Creates a client with the given configuration and no channel-event listener.
 *
 * @param nettyClientConfig client-side Netty configuration
 */
public NettyRemotingClient(final NettyClientConfig nettyClientConfig) {
    this(nettyClientConfig, null);
}
/**
 * Creates a client with the given configuration and an optional listener for
 * channel lifecycle events.
 *
 * <p>Initializes the oneway/async semaphores in the superclass, a fixed pool
 * for callback execution (size from config, defaulting to 4 when the config
 * value is non-positive) and a single-threaded NIO event loop group for
 * socket I/O.
 *
 * @param nettyClientConfig    client-side Netty configuration
 * @param channelEventListener listener for connect/close/idle events; may be null
 */
public NettyRemotingClient(final NettyClientConfig nettyClientConfig, //
    final ChannelEventListener channelEventListener) {
    super(nettyClientConfig.getClientOnewaySemaphoreValue(), nettyClientConfig.getClientAsyncSemaphoreValue());
    this.nettyClientConfig = nettyClientConfig;
    this.channelEventListener = channelEventListener;

    // Fall back to 4 callback threads when the configured value is non-positive.
    final int configuredThreads = nettyClientConfig.getClientCallbackExecutorThreads();
    final int callbackThreadCount = configuredThreads <= 0 ? 4 : configuredThreads;

    this.publicExecutor = Executors.newFixedThreadPool(callbackThreadCount, new ThreadFactory() {
        private final AtomicInteger counter = new AtomicInteger(0);

        @Override
        public Thread newThread(Runnable r) {
            return new Thread(r, "NettyClientPublicExecutor_" + this.counter.incrementAndGet());
        }
    });

    this.eventLoopGroupWorker = new NioEventLoopGroup(1, new ThreadFactory() {
        private final AtomicInteger counter = new AtomicInteger(0);

        @Override
        public Thread newThread(Runnable r) {
            return new Thread(r, String.format("NettyClientSelector_%d", this.counter.incrementAndGet()));
        }
    });
}
/**
 * Picks a pseudo-random starting index in [0, 999), used to seed
 * {@code namesrvIndex} so that different client instances start from
 * different positions in the name-server list.
 *
 * @return a uniformly distributed value in [0, 999)
 */
private static int initValueIndex() {
    // Random.nextInt(bound) already yields a uniform value in [0, bound);
    // this replaces the original abs(nextInt() % 999) % 999, whose outer
    // % 999 was a no-op on a value already confined to [0, 998].
    return new Random().nextInt(999);
}
/**
 * Bootstraps the client: creates the executor group that runs the pipeline
 * handlers, pre-configures the Netty {@link Bootstrap} (codec, idle detection,
 * connection management, response dispatch), schedules the periodic
 * response-table scan, and starts the channel-event executor when a
 * {@link ChannelEventListener} was supplied. No connection is opened here.
 */
@Override
public void start() {
    // Handler executor group, sized from config; threads named for debugging.
    this.defaultEventExecutorGroup = new DefaultEventExecutorGroup(//
        nettyClientConfig.getClientWorkerThreads(), //
        new ThreadFactory() {

            private AtomicInteger threadIndex = new AtomicInteger(0);

            @Override
            public Thread newThread(Runnable r) {
                return new Thread(r, "NettyClientWorkerThread_" + this.threadIndex.incrementAndGet());
            }
        });

    // NOTE(review): the fluent API returns this.bootstrap itself; the local
    // variable "handler" is never read afterwards.
    Bootstrap handler = this.bootstrap.group(this.eventLoopGroupWorker).channel(NioSocketChannel.class)//
        // Disable Nagle's algorithm: favor latency for small RPC frames.
        .option(ChannelOption.TCP_NODELAY, true)
        // TCP keep-alive off; liveness is handled by the IdleStateHandler below.
        .option(ChannelOption.SO_KEEPALIVE, false)
        .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, nettyClientConfig.getConnectTimeoutMillis())
        .option(ChannelOption.SO_SNDBUF, nettyClientConfig.getClientSocketSndBufSize())
        .option(ChannelOption.SO_RCVBUF, nettyClientConfig.getClientSocketRcvBufSize())
        .handler(new ChannelInitializer<SocketChannel>() {
            @Override
            public void initChannel(SocketChannel ch) throws Exception {
                ch.pipeline().addLast(//
                    // All handlers below run on defaultEventExecutorGroup,
                    // keeping the I/O event loop free.
                    defaultEventExecutorGroup, //
                    new NettyEncoder(), //
                    new NettyDecoder(), //
                    // Fires an ALL_IDLE event after the configured quiet period.
                    new IdleStateHandler(0, 0, nettyClientConfig.getClientChannelMaxIdleTimeSeconds()), //
                    new NettyConnetManageHandler(), //
                    new NettyClientHandler());
            }
        });

    // Every second (after a 3 s initial delay) run the inherited
    // scanResponseTable() housekeeping; exceptions are logged, not propagated,
    // so the timer keeps running.
    this.timer.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            try {
                NettyRemotingClient.this.scanResponseTable();
            } catch (Exception e) {
                log.error("scanResponseTable exception", e);
            }
        }
    }, 1000 * 3, 1000);

    if (this.channelEventListener != null) {
        this.nettyEventExecuter.start();
    }
}
@Override
public void shutdown() {
try {
this.timer.cancel();
for (ChannelWrapper cw : this.channelTables.values()) {
this.closeChannel(null, cw.getChannel());
}
this.channelTables.clear();
this.eventLoopGroupWorker.shutdownGracefully();
if (this.nettyEventExecuter != null) {
this.nettyEventExecuter.shutdown();
}
if (this.defaultEventExecutorGroup != null) {
this.defaultEventExecutorGroup.shutdownGracefully();
}
} catch (Exception e) {
log.error("NettyRemotingClient shutdown exception, ", e);
}
if (this.publicExecutor != null) {
try {
this.publicExecutor.shutdown();
} catch (Exception e) {
log.error("NettyRemotingServer shutdown exception, ", e);
}
}
}
    /**
     * Closes {@code channel} and removes its entry from the channel table, guarded
     * by {@code lockChannelTables}. The channel is closed even when the table entry
     * is missing or has been replaced by a newer channel for the same address.
     *
     * @param addr    remote address key; if null it is derived from the channel
     * @param channel channel to close; a null channel is a no-op
     */
    public void closeChannel(final String addr, final Channel channel) {
        if (null == channel)
            return;
        final String addrRemote = null == addr ? RemotingHelper.parseChannelRemoteAddr(channel) : addr;
        try {
            if (this.lockChannelTables.tryLock(LockTimeoutMillis, TimeUnit.MILLISECONDS)) {
                try {
                    boolean removeItemFromTable = true;
                    final ChannelWrapper prevCW = this.channelTables.get(addrRemote);
                    log.info("closeChannel: begin close the channel[{}] Found: {}", addrRemote, prevCW != null);
                    if (null == prevCW) {
                        log.info("closeChannel: the channel[{}] has been removed from the channel table before", addrRemote);
                        removeItemFromTable = false;
                    } else if (prevCW.getChannel() != channel) {
                        // A newer channel now occupies this slot; close only the stale one.
                        log.info("closeChannel: the channel[{}] has been closed before, and has been created again, nothing to do.",
                            addrRemote);
                        removeItemFromTable = false;
                    }
                    if (removeItemFromTable) {
                        this.channelTables.remove(addrRemote);
                        log.info("closeChannel: the channel[{}] was removed from channel table", addrRemote);
                    }
                    RemotingUtil.closeChannel(channel);
                } catch (Exception e) {
                    log.error("closeChannel: close the channel exception", e);
                } finally {
                    this.lockChannelTables.unlock();
                }
            } else {
                log.warn("closeChannel: try to lock channel table, but timeout, {}ms", LockTimeoutMillis);
            }
        } catch (InterruptedException e) {
            log.error("closeChannel exception", e);
        }
    }
    /**
     * Registers the hook invoked around every outbound request
     * (doBeforeRequest / doAfterResponse). Replaces any previously registered hook.
     */
    @Override
    public void registerRPCHook(RPCHook rpcHook) {
        this.rpcHook = rpcHook;
    }
    /**
     * Closes {@code channel} and removes its entry from the channel table, located
     * by scanning the table for an entry wrapping this exact channel instance.
     *
     * NOTE(review): unlike {@link #closeChannel(String, Channel)}, when the channel
     * is not found in the table this method does NOT call
     * {@code RemotingUtil.closeChannel(channel)} — confirm whether that asymmetry
     * is intentional.
     *
     * @param channel channel to close; a null channel is a no-op
     */
    public void closeChannel(final Channel channel) {
        if (null == channel)
            return;
        try {
            if (this.lockChannelTables.tryLock(LockTimeoutMillis, TimeUnit.MILLISECONDS)) {
                try {
                    boolean removeItemFromTable = true;
                    ChannelWrapper prevCW = null;
                    String addrRemote = null;
                    // Reverse lookup: find the table entry holding this channel instance.
                    for (Map.Entry<String, ChannelWrapper> entry : channelTables.entrySet()) {
                        String key = entry.getKey();
                        ChannelWrapper prev = entry.getValue();
                        if (prev.getChannel() != null) {
                            if (prev.getChannel() == channel) {
                                prevCW = prev;
                                addrRemote = key;
                                break;
                            }
                        }
                    }
                    if (null == prevCW) {
                        log.info("eventCloseChannel: the channel[{}] has been removed from the channel table before", addrRemote);
                        removeItemFromTable = false;
                    }
                    if (removeItemFromTable) {
                        this.channelTables.remove(addrRemote);
                        log.info("closeChannel: the channel[{}] was removed from channel table", addrRemote);
                        RemotingUtil.closeChannel(channel);
                    }
                } catch (Exception e) {
                    log.error("closeChannel: close the channel exception", e);
                } finally {
                    this.lockChannelTables.unlock();
                }
            } else {
                log.warn("closeChannel: try to lock channel table, but timeout, {}ms", LockTimeoutMillis);
            }
        } catch (InterruptedException e) {
            log.error("closeChannel exception", e);
        }
    }
@Override
public void updateNameServerAddressList(List<String> addrs) {
List<String> old = this.namesrvAddrList.get();
boolean update = false;
if (!addrs.isEmpty()) {
if (null == old) {
update = true;
} else if (addrs.size() != old.size()) {
update = true;
} else {
for (int i = 0; i < addrs.size() && !update; i++) {
if (!old.contains(addrs.get(i))) {
update = true;
}
}
}
if (update) {
Collections.shuffle(addrs);
this.namesrvAddrList.set(addrs);
}
}
}
@Override
public List<String> getNameServerAddressList() {
return this.namesrvAddrList.get();
}
    /**
     * Sends {@code request} to {@code addr} and blocks for the response, wrapping
     * the call with the registered {@link RPCHook} (if any). On send failure the
     * channel is closed and the exception rethrown; on timeout the channel is
     * closed only when {@code clientCloseSocketIfTimeout} is enabled.
     *
     * @param addr          target address, or null to use a name-server channel
     * @param request       command to send
     * @param timeoutMillis how long to wait for the response
     * @return the remote response
     * @throws RemotingConnectException     if no active channel could be obtained
     * @throws RemotingSendRequestException if sending the request failed
     * @throws RemotingTimeoutException     if no response arrived in time
     * @throws InterruptedException         if interrupted while waiting
     */
    @Override
    public RemotingCommand invokeSync(String addr, final RemotingCommand request, long timeoutMillis)
        throws InterruptedException, RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException {
        final Channel channel = this.getAndCreateChannel(addr);
        if (channel != null && channel.isActive()) {
            try {
                if (this.rpcHook != null) {
                    this.rpcHook.doBeforeRequest(addr, request);
                }
                RemotingCommand response = this.invokeSyncImpl(channel, request, timeoutMillis);
                if (this.rpcHook != null) {
                    this.rpcHook.doAfterResponse(RemotingHelper.parseChannelRemoteAddr(channel), request, response);
                }
                return response;
            } catch (RemotingSendRequestException e) {
                log.warn("invokeSync: send request exception, so close the channel[{}]", addr);
                this.closeChannel(addr, channel);
                throw e;
            } catch (RemotingTimeoutException e) {
                if (nettyClientConfig.isClientCloseSocketIfTimeout()) {
                    this.closeChannel(addr, channel);
                    log.warn("invokeSync: close socket because of timeout, {}ms, {}", timeoutMillis, addr);
                }
                log.warn("invokeSync: wait response timeout exception, the channel[{}]", addr);
                throw e;
            }
        } else {
            this.closeChannel(addr, channel);
            throw new RemotingConnectException(addr);
        }
    }
private Channel getAndCreateChannel(final String addr) throws InterruptedException {
if (null == addr)
return getAndCreateNameserverChannel();
ChannelWrapper cw = this.channelTables.get(addr);
if (cw != null && cw.isOK()) {
return cw.getChannel();
}
return this.createChannel(addr);
}
    /**
     * Returns a live channel to a name server. Prefers the previously chosen
     * address; otherwise, under {@code lockNamesrvChannel}, round-robins through
     * the configured address list (starting from {@code namesrvIndex}) until a
     * connection succeeds.
     *
     * @return an active name-server channel, or {@code null} if none could be
     *         established or the lock could not be acquired in time
     * @throws InterruptedException if interrupted while acquiring the lock
     */
    private Channel getAndCreateNameserverChannel() throws InterruptedException {
        String addr = this.namesrvAddrChoosed.get();
        if (addr != null) {
            ChannelWrapper cw = this.channelTables.get(addr);
            if (cw != null && cw.isOK()) {
                return cw.getChannel();
            }
        }
        final List<String> addrList = this.namesrvAddrList.get();
        if (this.lockNamesrvChannel.tryLock(LockTimeoutMillis, TimeUnit.MILLISECONDS)) {
            try {
                // Re-check under the lock: another thread may have connected meanwhile.
                addr = this.namesrvAddrChoosed.get();
                if (addr != null) {
                    ChannelWrapper cw = this.channelTables.get(addr);
                    if (cw != null && cw.isOK()) {
                        return cw.getChannel();
                    }
                }
                if (addrList != null && !addrList.isEmpty()) {
                    // Try each address at most once, advancing the shared round-robin index.
                    for (int i = 0; i < addrList.size(); i++) {
                        int index = this.namesrvIndex.incrementAndGet();
                        index = Math.abs(index);
                        index = index % addrList.size();
                        String newAddr = addrList.get(index);
                        this.namesrvAddrChoosed.set(newAddr);
                        Channel channelNew = this.createChannel(newAddr);
                        if (channelNew != null)
                            return channelNew;
                    }
                }
            } catch (Exception e) {
                log.error("getAndCreateNameserverChannel: create name server channel exception", e);
            } finally {
                this.lockNamesrvChannel.unlock();
            }
        } else {
            log.warn("getAndCreateNameserverChannel: try to lock name server, but timeout, {}ms", LockTimeoutMillis);
        }
        return null;
    }
    /**
     * Returns a live channel to {@code addr}, starting an asynchronous connect when
     * no usable cached channel or in-flight connect exists. Table mutation happens
     * under {@code lockChannelTables}; waiting for the connect to finish happens
     * outside the lock.
     *
     * @return an active channel, or {@code null} when the connect failed, timed
     *         out, or the table lock could not be acquired
     * @throws InterruptedException declared for lock acquisition
     */
    private Channel createChannel(final String addr) throws InterruptedException {
        ChannelWrapper cw = this.channelTables.get(addr);
        if (cw != null && cw.isOK()) {
            return cw.getChannel();
        }
        if (this.lockChannelTables.tryLock(LockTimeoutMillis, TimeUnit.MILLISECONDS)) {
            try {
                boolean createNewConnection = false;
                // Re-check under the lock: another thread may have (re)connected meanwhile.
                cw = this.channelTables.get(addr);
                if (cw != null) {
                    if (cw.isOK()) {
                        return cw.getChannel();
                    }
                    // A connect is still in flight; reuse it (awaited below).
                    else if (!cw.getChannelFuture().isDone()) {
                        createNewConnection = false;
                    }
                    // Connect finished but the channel is dead; discard and retry.
                    else {
                        this.channelTables.remove(addr);
                        createNewConnection = true;
                    }
                }
                else {
                    createNewConnection = true;
                }
                if (createNewConnection) {
                    ChannelFuture channelFuture = this.bootstrap.connect(RemotingHelper.string2SocketAddress(addr));
                    log.info("createChannel: begin to connect remote host[{}] asynchronously", addr);
                    cw = new ChannelWrapper(channelFuture);
                    this.channelTables.put(addr, cw);
                }
            } catch (Exception e) {
                log.error("createChannel: create channel exception", e);
            } finally {
                this.lockChannelTables.unlock();
            }
        } else {
            log.warn("createChannel: try to lock channel table, but timeout, {}ms", LockTimeoutMillis);
        }
        // Wait (bounded by the configured connect timeout) for the connect to complete.
        if (cw != null) {
            ChannelFuture channelFuture = cw.getChannelFuture();
            if (channelFuture.awaitUninterruptibly(this.nettyClientConfig.getConnectTimeoutMillis())) {
                if (cw.isOK()) {
                    log.info("createChannel: connect remote host[{}] success, {}", addr, channelFuture.toString());
                    return cw.getChannel();
                } else {
                    log.warn("createChannel: connect remote host[" + addr + "] failed, " + channelFuture.toString(), channelFuture.cause());
                }
            } else {
                log.warn("createChannel: connect remote host[{}] timeout {}ms, {}", addr, this.nettyClientConfig.getConnectTimeoutMillis(),
                    channelFuture.toString());
            }
        }
        return null;
    }
    /**
     * Sends {@code request} asynchronously; {@code invokeCallback} is invoked with
     * the outcome. The registered {@link RPCHook}'s doBeforeRequest runs first. On
     * send failure the channel is closed and the exception rethrown.
     *
     * @param addr          target address, or null to use a name-server channel
     * @param request       command to send
     * @param timeoutMillis response wait budget passed to the async machinery
     * @param invokeCallback callback receiving the result
     * @throws RemotingConnectException if no active channel could be obtained
     */
    @Override
    public void invokeAsync(String addr, RemotingCommand request, long timeoutMillis, InvokeCallback invokeCallback)
        throws InterruptedException, RemotingConnectException, RemotingTooMuchRequestException, RemotingTimeoutException,
        RemotingSendRequestException {
        final Channel channel = this.getAndCreateChannel(addr);
        if (channel != null && channel.isActive()) {
            try {
                if (this.rpcHook != null) {
                    this.rpcHook.doBeforeRequest(addr, request);
                }
                this.invokeAsyncImpl(channel, request, timeoutMillis, invokeCallback);
            } catch (RemotingSendRequestException e) {
                log.warn("invokeAsync: send request exception, so close the channel[{}]", addr);
                this.closeChannel(addr, channel);
                throw e;
            }
        } else {
            this.closeChannel(addr, channel);
            throw new RemotingConnectException(addr);
        }
    }
    /**
     * Sends {@code request} one-way (no response is awaited). The registered
     * {@link RPCHook}'s doBeforeRequest runs first. On send failure the channel is
     * closed and the exception rethrown.
     *
     * @param addr          target address, or null to use a name-server channel
     * @param request       command to send
     * @param timeoutMillis budget for acquiring the oneway semaphore/flush
     * @throws RemotingConnectException if no active channel could be obtained
     */
    @Override
    public void invokeOneway(String addr, RemotingCommand request, long timeoutMillis) throws InterruptedException,
        RemotingConnectException, RemotingTooMuchRequestException, RemotingTimeoutException, RemotingSendRequestException {
        final Channel channel = this.getAndCreateChannel(addr);
        if (channel != null && channel.isActive()) {
            try {
                if (this.rpcHook != null) {
                    this.rpcHook.doBeforeRequest(addr, request);
                }
                this.invokeOnewayImpl(channel, request, timeoutMillis);
            } catch (RemotingSendRequestException e) {
                log.warn("invokeOneway: send request exception, so close the channel[{}]", addr);
                this.closeChannel(addr, channel);
                throw e;
            }
        } else {
            this.closeChannel(addr, channel);
            throw new RemotingConnectException(addr);
        }
    }
@Override
public void registerProcessor(int requestCode, NettyRequestProcessor processor, ExecutorService executor) {
ExecutorService executorThis = executor;
if (null == executor) {
executorThis = this.publicExecutor;
}
Pair<NettyRequestProcessor, ExecutorService> pair = new Pair<NettyRequestProcessor, ExecutorService>(processor, executorThis);
this.processorTable.put(requestCode, pair);
}
@Override
public boolean isChannelWriteable(String addr) {
ChannelWrapper cw = this.channelTables.get(addr);
if (cw != null && cw.isOK()) {
return cw.isWriteable();
}
return true;
}
@Override
public ChannelEventListener getChannelEventListener() {
return channelEventListener;
}
@Override
public RPCHook getRPCHook() {
return this.rpcHook;
}
@Override
public ExecutorService getCallbackExecutor() {
return this.publicExecutor;
}
public List<String> getNamesrvAddrList() {
return namesrvAddrList.get();
}
public RPCHook getRpcHook() {
return rpcHook;
}
static class ChannelWrapper {
private final ChannelFuture channelFuture;
public ChannelWrapper(ChannelFuture channelFuture) {
this.channelFuture = channelFuture;
}
public boolean isOK() {
return this.channelFuture.channel() != null && this.channelFuture.channel().isActive();
}
public boolean isWriteable() {
return this.channelFuture.channel().isWritable();
}
private Channel getChannel() {
return this.channelFuture.channel();
}
public ChannelFuture getChannelFuture() {
return channelFuture;
}
}
    /**
     * Inbound pipeline tail: forwards every decoded {@link RemotingCommand} to
     * {@code processMessageReceived}, which is defined in the remoting base class
     * (not shown in this file section).
     */
    class NettyClientHandler extends SimpleChannelInboundHandler<RemotingCommand> {
        @Override
        protected void channelRead0(ChannelHandlerContext ctx, RemotingCommand msg) throws Exception {
            processMessageReceived(ctx, msg);
        }
    }
class NettyConnetManageHandler extends ChannelDuplexHandler {
@Override
public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise)
throws Exception {
final String local = localAddress == null ? "UNKNOW" : localAddress.toString();
final String remote = remoteAddress == null ? "UNKNOW" : remoteAddress.toString();
log.info("NETTY CLIENT PIPELINE: CONNECT {} => {}", local, remote);
super.connect(ctx, remoteAddress, localAddress, promise);
if (NettyRemotingClient.this.channelEventListener != null) {
NettyRemotingClient.this.putNettyEvent(new NettyEvent(NettyEventType.CONNECT, remoteAddress.toString(), ctx.channel()));
}
}
@Override
public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.info("NETTY CLIENT PIPELINE: DISCONNECT {}", remoteAddress);
closeChannel(ctx.channel());
super.disconnect(ctx, promise);
if (NettyRemotingClient.this.channelEventListener != null) {
NettyRemotingClient.this.putNettyEvent(new NettyEvent(NettyEventType.CLOSE, remoteAddress.toString(), ctx.channel()));
}
}
@Override
public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.info("NETTY CLIENT PIPELINE: CLOSE {}", remoteAddress);
closeChannel(ctx.channel());
super.close(ctx, promise);
if (NettyRemotingClient.this.channelEventListener != null) {
NettyRemotingClient.this.putNettyEvent(new NettyEvent(NettyEventType.CLOSE, remoteAddress.toString(), ctx.channel()));
}
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof IdleStateEvent) {
IdleStateEvent evnet = (IdleStateEvent) evt;
if (evnet.state().equals(IdleState.ALL_IDLE)) {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.warn("NETTY CLIENT PIPELINE: IDLE exception [{}]", remoteAddress);
closeChannel(ctx.channel());
if (NettyRemotingClient.this.channelEventListener != null) {
NettyRemotingClient.this
.putNettyEvent(new NettyEvent(NettyEventType.IDLE, remoteAddress.toString(), ctx.channel()));
}
}
}
ctx.fireUserEventTriggered(evt);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.warn("NETTY CLIENT PIPELINE: exceptionCaught {}", remoteAddress);
log.warn("NETTY CLIENT PIPELINE: exceptionCaught exception.", cause);
closeChannel(ctx.channel());
if (NettyRemotingClient.this.channelEventListener != null) {
NettyRemotingClient.this.putNettyEvent(new NettyEvent(NettyEventType.EXCEPTION, remoteAddress.toString(), ctx.channel()));
}
}
}
}
| {
"content_hash": "ec96a3d3212f104fad9f333086d86546",
"timestamp": "",
"source": "github",
"line_count": 663,
"max_line_length": 140,
"avg_line_length": 40.04374057315234,
"alnum_prop": 0.5825454819390561,
"repo_name": "Todd-start/RocketMQ",
"id": "8c9d5fd82b9794803f928ff3b6ff60f88e1a7c19",
"size": "27356",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rocketmq-remoting/src/main/java/com/alibaba/rocketmq/remoting/netty/NettyRemotingClient.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4379"
},
{
"name": "Java",
"bytes": "5094125"
},
{
"name": "Shell",
"bytes": "39893"
}
],
"symlink_target": ""
} |
package mist.api.encoding
import mist.api.data.{JsData, JsMap}
import shadedshapeless.labelled.FieldType
import shadedshapeless._
import scala.annotation.implicitNotFound
/**
 * Type class encoding a value of `A` as a JSON object ([[JsMap]]).
 *
 * Fixes: removed the unused `self =>` alias, and corrected grammar in the
 * user-facing `@implicitNotFound` diagnostic ("instances exist", "its fields").
 */
@implicitNotFound(msg =
  "Couldn't find mist.api.encoding.ObjectEncoder[${A}]" +
  " Ensure that JsEncoder instances exist for its fields" +
  " or add `import mist.api.encoding.defaults._`"
)
trait ObjectEncoder[A] {
  def apply(a : A): JsMap
}
object ObjectEncoder {
  // Summoner: materializes an encoder already available in implicit scope.
  def apply[A](implicit enc: ObjectEncoder[A]): ObjectEncoder[A] = enc
  // Lifts a plain function into an ObjectEncoder instance.
  def create[A](f: A => JsMap): ObjectEncoder[A] = new ObjectEncoder[A] {
    override def apply(a: A): JsMap = f(a)
  }
  // Base case of the generic derivation: an empty HList encodes to an empty object.
  implicit val hNilEnc: ObjectEncoder[HNil] = ObjectEncoder.create[HNil](_ => JsMap.empty)
  // Inductive case: encode the labelled head field with its JsEncoder and prepend
  // the (fieldName -> value) pair to the encoding of the tail.
  implicit def hlistExt[K <: Symbol, H, T <: HList](implicit
    witness: Witness.Aux[K],
    lHenc: Lazy[JsEncoder[H]],
    tEnc: ObjectEncoder[T]
  ): ObjectEncoder[FieldType[K, H] :: T] = {
    val hEnc = lHenc.value
    val key = witness.value.name
    ObjectEncoder.create[FieldType[K, H] :: T](hlist => {
      val h = hEnc(hlist.head)
      val t = tEnc(hlist.tail)
      val values = (key -> h) +: t.fields
      JsMap(values: _*)
    })
  }
  // Derives an encoder for any product type via its LabelledGeneric representation.
  implicit def labelled[A, H <: HList](implicit labGen: LabelledGeneric.Aux[A, H], enc: ObjectEncoder[H]): ObjectEncoder[A] =
    ObjectEncoder.create(a => enc(labGen.to(a)))
}
| {
"content_hash": "1c6c3b81770386d1167cece2ba29509c",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 125,
"avg_line_length": 30.444444444444443,
"alnum_prop": 0.67007299270073,
"repo_name": "Hydrospheredata/mist",
"id": "c813652441d7612d83b9eaa4d5c3da8efddf262f",
"size": "1370",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mist-lib/src/main/scala/mist/api/encoding/ObjectEncoder.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "5043"
},
{
"name": "Java",
"bytes": "277"
},
{
"name": "Python",
"bytes": "20290"
},
{
"name": "Scala",
"bytes": "627775"
},
{
"name": "Shell",
"bytes": "5236"
}
],
"symlink_target": ""
} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.navercorp.pinpoint</groupId>
<artifactId>pinpoint</artifactId>
<relativePath>../..</relativePath>
<version>1.8.0-SNAPSHOT</version>
</parent>
<artifactId>pinpoint-commons-dbcp-plugin</artifactId>
<name>pinpoint-commons-dbcp-plugin</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.navercorp.pinpoint</groupId>
<artifactId>pinpoint-bootstrap-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>
| {
"content_hash": "a0e42632193f7e4f39d1fcc8968029d9",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 104,
"avg_line_length": 37.25925925925926,
"alnum_prop": 0.6332007952286283,
"repo_name": "chenguoxi1985/pinpoint",
"id": "c5053fbcec09e633a886fa744004671ef3ee83ec",
"size": "1006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/dbcp/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "27926"
},
{
"name": "CSS",
"bytes": "156512"
},
{
"name": "CoffeeScript",
"bytes": "10124"
},
{
"name": "Groovy",
"bytes": "1423"
},
{
"name": "HTML",
"bytes": "631701"
},
{
"name": "Java",
"bytes": "13132756"
},
{
"name": "JavaScript",
"bytes": "4813332"
},
{
"name": "Makefile",
"bytes": "5246"
},
{
"name": "PLSQL",
"bytes": "4156"
},
{
"name": "Python",
"bytes": "3523"
},
{
"name": "Ruby",
"bytes": "943"
},
{
"name": "Shell",
"bytes": "37985"
},
{
"name": "Thrift",
"bytes": "14690"
}
],
"symlink_target": ""
} |
Something gets lost and eventually will be found again.
Real world trousers temporarily store things for you, these trousers feature nearly unlimited storage and persistence plus filtering capabilities. Just add more pockets!
One could say this is nothing more than another Stickies app but let's just say, it can _also_ be a Stickies app.
## What's this?
__Trousers__ are keeping things organized for you! Though they are not a schematic storage within which everything has its right place, they are rather storing things simply organized by _pockets_.
It should always follow the principle of _easy store – complex retrieval_. That is, it should always be easier to store something than it is to lookup a certain note. Just like the real-world trousers, it is easier to just put something in one of your pockets than it is to find something you have already put there.
Unlike real-world pants, __Trousers__ eases the pain of finding something within all of your pockets.
## But how?
Hey, this is nerdy alpha stuff! You need a __CouchDB__ and __ruby__. If you feel uncomfortable with using things like that, __Trousers__ might not be what you expect. Otherwise, feel free to review my code since it is not very much.
The usage is pretty simple. For installation just run `bundle exec rake build` and `bundle exec rake install` to install __Trousers__ as a gem (works with rvm and ruby 1.9.2 as well…).
Afterwards run `pants` or `pockets` which will list the rest of the available commands.
Now you can store something in you trousers, given that you have a __CouchDB__ running on `localhost:5984` that is _admin partying_.
If your __CouchDB__ is not _admin partying_ please create a DB named __trousers__ before using the above commands.
## Contributing to trousers
* As said, this is alpha or rather pre-alpha stage software! Beware as it is also untested. Since there is only one lib and a Thorfile, you should not have problems reviewing the code before extending or fixing anything. Otherwise just ask!
* Check out the latest master to make sure the feature hasn't been implemented or the bug hasn't been fixed yet
* Check out the issue tracker to make sure someone hasn't already requested and/or contributed it
* Fork the project
* Start a feature/bugfix branch
* Commit and push until you are happy with your contribution
* Make sure to add tests for it. This is important so I don't break it in a future version unintentionally.
* Please try not to mess with the Rakefile, version, or history. If you want to have your own version, or it is otherwise necessary, that is fine, but please isolate the change to its own commit so I can cherry-pick around it.
## Copyright
Copyright (c) 2010 lennart. See LICENSE.txt for
further details.
| {
"content_hash": "5a43670656d78804d2147c586c9553ff",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 317,
"avg_line_length": 65.5952380952381,
"alnum_prop": 0.770961887477314,
"repo_name": "lennart/trousers",
"id": "2a5a06b01c93f59c193993861c3971e0347313ff",
"size": "2771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "5131"
}
],
"symlink_target": ""
} |
from south.v2 import DataMigration
class Migration(DataMigration):
    def forwards(self, orm):
        """For each course cloned from an earlier edition, set
        ``old_course_status='n'`` on the enrollment records of students who were
        enrolled in both the new course and its source edition.
        """
        # Courses created from a previous edition (created_from is set).
        second_edition_courses = orm['courses.course'].objects.filter(created_from__isnull=False)
        for course in second_edition_courses:
            # Students of this course also enrolled in the original edition;
            # .values('pk').query keeps the pk restriction as a DB subquery.
            users = course.students.filter(pk__in=course.created_from.students.values('pk').query)
            orm['courses.CourseStudent'].objects.filter(student__in=users,
                                                        course=course).update(old_course_status='n')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '254'})
},
'badges.alignment': {
'Meta': {'object_name': 'Alignment'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'badges.badge': {
'Meta': {'ordering': "['-modified', '-created']", 'object_name': 'Badge'},
'alignments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "u'alignments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['badges.Alignment']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'criteria': ('django.db.models.fields.URLField', [], {'max_length': '255'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "u'tags'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['badges.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'badges.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'courses.announcement': {
'Meta': {'object_name': 'Announcement'},
'content': ('tinymce.models.HTMLField', [], {}),
'course': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['courses.Course']"}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'courses.attachment': {
'Meta': {'object_name': 'Attachment'},
'attachment': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kq': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['courses.KnowledgeQuantum']"})
},
'courses.course': {
'Meta': {'ordering': "['order']", 'object_name': 'Course'},
'certification_available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'certification_banner': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'completion_badge': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'course'", 'null': 'True', 'to': "orm['badges.Badge']"}),
'created_from': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'courses_created_of'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['courses.Course']"}),
'description': ('tinymce.models.HTMLField', [], {}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'enrollment_method': ('django.db.models.fields.CharField', [], {'default': "'free'", 'max_length': '200'}),
'estimated_effort': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intended_audience': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'learning_goals': ('tinymce.models.HTMLField', [], {'blank': 'True'}),
'max_reservations_pending': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '8'}),
'max_reservations_total': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '8'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'courses_as_owner'", 'to': "orm['auth.User']"}),
'promotion_media_content_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'promotion_media_content_type': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'requirements': ('tinymce.models.HTMLField', [], {'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'static_page': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['courses.StaticPage']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'d'", 'max_length': '10'}),
'students': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'courses_as_student'", 'blank': 'True', 'through': "orm['courses.CourseStudent']", 'to': "orm['auth.User']"}),
'teachers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'courses_as_teacher'", 'symmetrical': 'False', 'through': "orm['courses.CourseTeacher']", 'to': "orm['auth.User']"}),
'threshold': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '4', 'decimal_places': '2', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'courses.coursestudent': {
'Meta': {'object_name': 'CourseStudent'},
'course': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['courses.Course']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'old_course_status': ('django.db.models.fields.CharField', [], {'default': "'f'", 'max_length': '1'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'courses.courseteacher': {
'Meta': {'ordering': "['order']", 'object_name': 'CourseTeacher'},
'course': ('adminsortable.fields.SortableForeignKey', [], {'to': "orm['courses.Course']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'teacher': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'courses.knowledgequantum': {
'Meta': {'ordering': "['order']", 'unique_together': "(('title', 'unit'),)", 'object_name': 'KnowledgeQuantum'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media_content_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'media_content_type': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'supplementary_material': ('tinymce.models.HTMLField', [], {'blank': 'True'}),
'teacher_comments': ('tinymce.models.HTMLField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'unit': ('adminsortable.fields.SortableForeignKey', [], {'to': "orm['courses.Unit']"}),
'weight': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
'courses.option': {
'Meta': {'unique_together': "(('question', 'x', 'y'),)", 'object_name': 'Option'},
'feedback': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '12'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'optiontype': ('django.db.models.fields.CharField', [], {'default': "'t'", 'max_length': '1'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['courses.Question']"}),
'solution': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '100'}),
'x': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'y': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'courses.question': {
'Meta': {'object_name': 'Question'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kq': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['courses.KnowledgeQuantum']", 'unique': 'True'}),
'last_frame': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'solution_media_content_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'solution_media_content_type': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'solution_text': ('tinymce.models.HTMLField', [], {'blank': 'True'}),
'use_last_frame': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'courses.staticpage': {
'Meta': {'object_name': 'StaticPage'},
'body': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'courses.unit': {
'Meta': {'ordering': "['order']", 'unique_together': "(('title', 'course'),)", 'object_name': 'Unit'},
'course': ('adminsortable.fields.SortableForeignKey', [], {'to': "orm['courses.Course']"}),
'deadline': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'d'", 'max_length': '10'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'unittype': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'weight': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
}
}
complete_apps = ['courses']
symmetrical = True
| {
"content_hash": "a38da37814abc8e98b35fa59a81642af",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 232,
"avg_line_length": 82.03092783505154,
"alnum_prop": 0.5480080432323741,
"repo_name": "GeographicaGS/moocng",
"id": "2af9d77c8c886eeb3ceee8782562287e6c7c0b59",
"size": "15938",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "moocng/courses/migrations/0027_fill_old_course_status.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "162701"
},
{
"name": "HTML",
"bytes": "362912"
},
{
"name": "JavaScript",
"bytes": "1911286"
},
{
"name": "Python",
"bytes": "2723710"
},
{
"name": "Shell",
"bytes": "24842"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>schroeder: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.15.1 / schroeder - 8.10.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
schroeder
<small>
8.10.0
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-05-30 04:18:43 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-05-30 04:18:43 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-threads base
base-unix base
conf-findutils 1 Virtual package relying on findutils
conf-gmp 4 Virtual package relying on a GMP lib system installation
coq 8.15.1 Formal proof management system
dune 3.2.0 Fast, portable, and opinionated build system
ocaml 4.14.0 The OCaml compiler (virtual package)
ocaml-base-compiler 4.14.0 Official release 4.14.0
ocaml-config 2 OCaml Switch Configuration
ocaml-options-vanilla 1 Ensure that OCaml is compiled with no special options enabled
ocamlfind 1.9.3 A library manager for OCaml
zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers
# opam file:
opam-version: "2.0"
maintainer: "Hugo.Herbelin@inria.fr"
homepage: "https://github.com/coq-contribs/schroeder"
license: "LGPL 2.1"
build: [make "-j%{jobs}%"]
install: [make "install"]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/Schroeder"]
depends: [
"ocaml"
"coq" {>= "8.10" & < "8.11~"}
]
tags: [
"keyword: Schroeder-Bernstein"
"keyword: set theory"
"category: Mathematics/Logic/Set theory"
]
authors: [
"Hugo herbelin"
]
bug-reports: "https://github.com/coq-contribs/schroeder/issues"
dev-repo: "git+https://github.com/coq-contribs/schroeder.git"
synopsis: "The Theorem of Schroeder-Bernstein"
description: """
Fraenkel's proof of Schroeder-Bernstein theorem on decidable sets
is formalized in a constructive variant of set theory based on
stratified universes (the one defined in the Ensemble library).
The informal proof can be found for instance in "Axiomatic Set Theory"
from P. Suppes."""
flags: light-uninstall
url {
src: "https://github.com/coq-contribs/schroeder/archive/v8.10.0.tar.gz"
checksum: "md5=80dc98ed34340e31be9385fcd655e50d"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-schroeder.8.10.0 coq.8.15.1</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.15.1).
The following dependencies couldn't be met:
- coq-schroeder -> coq < 8.11~ -> ocaml < 4.10
base of this switch (use `--unlock-base' to force)
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-schroeder.8.10.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "1a5bbabece4005f6a8925e333ae7c606",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 159,
"avg_line_length": 41.189655172413794,
"alnum_prop": 0.5476489465606251,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "cd23557df5643f815939d933720aacc4c40c3a47",
"size": "7192",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.14.0-2.0.10/released/8.15.1/schroeder/8.10.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
using base::TimeDelta;
using base::TimeTicks;
using content::RenderViewHost;
using content::SessionStorageNamespace;
namespace {
void Send(int child_id, IPC::Message* raw_message) {
using content::RenderProcessHost;
scoped_ptr<IPC::Message> own_message(raw_message);
RenderProcessHost* render_process_host = RenderProcessHost::FromID(child_id);
if (!render_process_host)
return;
render_process_host->Send(own_message.release());
}
} // namespace
namespace prerender {
// |manager| is stored unowned and used to launch/time prerenders.
// NOTE(review): presumably |manager| outlives this object — the destructor's
// DCHECK message implies they shut down together; confirm at the owner.
PrerenderLinkManager::PrerenderLinkManager(PrerenderManager* manager)
    : has_shutdown_(false),
      manager_(manager) {
}
// At destruction the PrerenderManager must already have stopped all running
// prerenders (enforced by the DCHECK below); any remaining handles are for
// stopped prerenders and are simply deleted here.
PrerenderLinkManager::~PrerenderLinkManager() {
  for (std::list<LinkPrerender>::iterator i = prerenders_.begin();
       i != prerenders_.end(); ++i) {
    if (i->handle) {
      DCHECK(!i->handle->IsPrerendering())
          << "All running prerenders should stop at the same time as the "
          << "PrerenderManager.";
      delete i->handle;
      // Clear the handle with NULL (not 0) for consistency with the rest of
      // this file, and so LinkPrerender::~LinkPrerender's DCHECK passes.
      i->handle = NULL;
    }
  }
}
// Registers a <link rel=prerender> request from the renderer process
// |launcher_child_id| / view |render_view_route_id|. |prerender_id| is the
// renderer-assigned id used to correlate later cancel/abandon messages.
// The request is queued and StartPrerenders() decides whether concurrency
// limits allow launching it now.
void PrerenderLinkManager::OnAddPrerender(int launcher_child_id,
                                          int prerender_id,
                                          const GURL& url,
                                          const content::Referrer& referrer,
                                          const gfx::Size& size,
                                          int render_view_route_id) {
  // TODO(gavinp): Determine why WebKit appears to be sending duplicate adds,
  // and prevent it.
  if (FindByLauncherChildIdAndPrerenderId(launcher_child_id, prerender_id))
    return;
  content::RenderProcessHost* rph =
      content::RenderProcessHost::FromID(launcher_child_id);
  // Guests inside <webview> do not support cross-process navigation and so we
  // do not allow guests to prerender content.
  if (rph && rph->IsGuest())
    return;
  LinkPrerender
      prerender(launcher_child_id, prerender_id, url, referrer, size,
                render_view_route_id, manager_->GetCurrentTimeTicks());
  prerenders_.push_back(prerender);
  StartPrerenders();
}
// Handles an explicit cancel from the launching renderer. After cancelling,
// a queued prerender may be able to start, so StartPrerenders() is retried.
void PrerenderLinkManager::OnCancelPrerender(int child_id, int prerender_id) {
  LinkPrerender* prerender = FindByLauncherChildIdAndPrerenderId(child_id,
                                                                 prerender_id);
  if (!prerender)
    return;
  // Remove the handle from the PrerenderLinkManager before we cancel this
  // prerender, to avoid reentering the PrerenderLinkManager, sending events to
  // the underlying prerender and making a second erase.
  scoped_ptr<PrerenderHandle> own_prerender_handle(prerender->handle);
  prerender->handle = NULL;
  RemovePrerender(prerender);
  if (own_prerender_handle)
    own_prerender_handle->OnCancel();
  StartPrerenders();
}
// Handles the launcher navigating away from the prerender's link. A running
// prerender is only notified (OnNavigateAway) and stays in the list; it is
// removed later when it stops (via OnPrerenderStop).
void PrerenderLinkManager::OnAbandonPrerender(int child_id, int prerender_id) {
  LinkPrerender* prerender = FindByLauncherChildIdAndPrerenderId(child_id,
                                                                 prerender_id);
  if (!prerender)
    return;
  if (!prerender->handle) {
    // Never launched, so there is nothing to notify — just drop the entry.
    RemovePrerender(prerender);
    return;
  }
  prerender->handle->OnNavigateAway();
  DCHECK(prerender->handle);
  // If the prerender is not running, remove it from the list so it does not
  // leak. If it is running, it will send a cancel event when it stops which
  // will remove it.
  if (!prerender->handle->IsPrerendering())
    RemovePrerender(prerender);
}
// Called when the renderer process |child_id| goes away: abandons every
// prerender it launched. OnAbandonPrerender may erase the current element,
// so |next| is advanced before |it| is processed.
void PrerenderLinkManager::OnChannelClosing(int child_id) {
  std::list<LinkPrerender>::iterator next = prerenders_.begin();
  while (next != prerenders_.end()) {
    std::list<LinkPrerender>::iterator it = next;
    ++next;
    if (child_id != it->launcher_child_id)
      continue;
    const size_t running_prerender_count = CountRunningPrerenders();
    OnAbandonPrerender(child_id, it->prerender_id);
    // Abandoning detaches entries but must not change how many are running.
    DCHECK_EQ(running_prerender_count, CountRunningPrerenders());
  }
}
// Snapshot of one <link rel=prerender> request. |handle| stays NULL until
// StartPrerenders() actually launches the prerender in the manager;
// |creation_time| is used to expire requests that waited too long.
PrerenderLinkManager::LinkPrerender::LinkPrerender(
    int launcher_child_id,
    int prerender_id,
    const GURL& url,
    const content::Referrer& referrer,
    const gfx::Size& size,
    int render_view_route_id,
    TimeTicks creation_time) : launcher_child_id(launcher_child_id),
                               prerender_id(prerender_id),
                               url(url),
                               referrer(referrer),
                               size(size),
                               render_view_route_id(render_view_route_id),
                               creation_time(creation_time),
                               handle(NULL) {
}
// An entry may only be destroyed after its handle has been detached and
// deleted (RemovePrerender and the destructor both clear it first).
PrerenderLinkManager::LinkPrerender::~LinkPrerender() {
  DCHECK_EQ(static_cast<PrerenderHandle*>(NULL), handle)
      << "The PrerenderHandle should be destroyed before its Prerender.";
}
// True when no link-triggered prerenders (queued or launched) are tracked.
bool PrerenderLinkManager::IsEmpty() const {
  return prerenders_.empty();
}
size_t PrerenderLinkManager::CountRunningPrerenders() const {
size_t retval = 0;
for (std::list<LinkPrerender>::const_iterator i = prerenders_.begin();
i != prerenders_.end(); ++i) {
if (i->handle && i->handle->IsPrerendering())
++retval;
}
return retval;
}
// Launches queued prerenders subject to the manager's concurrency limits
// (global max_link_concurrency and per-launcher
// max_link_concurrency_per_launcher). Entries that waited longer than
// max_wait_to_launch, or that the manager refuses to start, are dropped.
void PrerenderLinkManager::StartPrerenders() {
  if (has_shutdown_)
    return;
  size_t total_started_prerender_count = 0;
  std::multiset<std::pair<int, int> >
      running_launcher_and_render_view_routes;
  // Scan the list, counting how many prerenders have handles (and so were added
  // to the PrerenderManager). The count is done for the system as a whole, and
  // also per launcher.
  for (std::list<LinkPrerender>::iterator i = prerenders_.begin();
       i != prerenders_.end(); ++i) {
    if (i->handle) {
      ++total_started_prerender_count;
      std::pair<int, int> launcher_and_render_view_route(
          i->launcher_child_id, i->render_view_route_id);
      running_launcher_and_render_view_routes.insert(
          launcher_and_render_view_route);
      DCHECK_GE(manager_->config().max_link_concurrency_per_launcher,
                running_launcher_and_render_view_routes.count(
                    launcher_and_render_view_route));
    }
    // Sanity check: every entry must be findable by its (launcher, id) key.
    DCHECK_EQ(&(*i), FindByLauncherChildIdAndPrerenderId(i->launcher_child_id,
                                                         i->prerender_id));
  }
  DCHECK_GE(manager_->config().max_link_concurrency,
            total_started_prerender_count);
  DCHECK_LE(CountRunningPrerenders(), total_started_prerender_count);
  TimeTicks now = manager_->GetCurrentTimeTicks();
  // Scan the list again, starting prerenders as our counts allow.
  // |i| may be erased inside the loop, so |next| is advanced first.
  std::list<LinkPrerender>::iterator next = prerenders_.begin();
  while (next != prerenders_.end()) {
    std::list<LinkPrerender>::iterator i = next;
    ++next;
    if (total_started_prerender_count >=
            manager_->config().max_link_concurrency ||
        total_started_prerender_count >= prerenders_.size()) {
      // The system is already at its prerender concurrency limit.
      return;
    }
    if (i->handle) {
      // This prerender has already been added to the prerender manager.
      continue;
    }
    TimeDelta prerender_age = now - i->creation_time;
    if (prerender_age >= manager_->config().max_wait_to_launch) {
      // This prerender waited too long in the queue before launching.
      prerenders_.erase(i);
      continue;
    }
    std::pair<int, int> launcher_and_render_view_route(
        i->launcher_child_id, i->render_view_route_id);
    if (manager_->config().max_link_concurrency_per_launcher <=
        running_launcher_and_render_view_routes.count(
            launcher_and_render_view_route)) {
      // This prerender's launcher is already at its limit.
      continue;
    }
    PrerenderHandle* handle = manager_->AddPrerenderFromLinkRelPrerender(
        i->launcher_child_id, i->render_view_route_id,
        i->url, i->referrer, i->size);
    if (!handle) {
      // This prerender couldn't be launched, it's gone.
      prerenders_.erase(i);
      continue;
    }
    // We have successfully started a new prerender.
    i->handle = handle;
    ++total_started_prerender_count;
    handle->SetObserver(this);
    if (handle->IsPrerendering())
      OnPrerenderStart(handle);
    running_launcher_and_render_view_routes.insert(
        launcher_and_render_view_route);
  }
}
// Looks up the entry matching the (launching renderer, renderer-assigned id)
// pair; returns NULL when no such entry exists.
PrerenderLinkManager::LinkPrerender*
PrerenderLinkManager::FindByLauncherChildIdAndPrerenderId(int launcher_child_id,
                                                          int prerender_id) {
  std::list<LinkPrerender>::iterator it = prerenders_.begin();
  for (; it != prerenders_.end(); ++it) {
    const bool same_launcher = launcher_child_id == it->launcher_child_id;
    if (same_launcher && prerender_id == it->prerender_id)
      return &*it;
  }
  return NULL;
}
// Looks up the entry owning |prerender_handle|; returns NULL when the handle
// is not tracked here (e.g. already removed).
PrerenderLinkManager::LinkPrerender*
PrerenderLinkManager::FindByPrerenderHandle(PrerenderHandle* prerender_handle) {
  DCHECK(prerender_handle);
  std::list<LinkPrerender>::iterator it = prerenders_.begin();
  for (; it != prerenders_.end(); ++it) {
    if (it->handle == prerender_handle)
      return &*it;
  }
  return NULL;
}
// Erases |prerender| from the list, deleting its handle (if any) after
// detaching it so LinkPrerender's destructor sees a NULL handle. The entry
// must be present; hitting the end of the list is a programming error.
void PrerenderLinkManager::RemovePrerender(LinkPrerender* prerender) {
  for (std::list<LinkPrerender>::iterator it = prerenders_.begin();
       it != prerenders_.end(); ++it) {
    if (&*it != prerender)
      continue;
    scoped_ptr<PrerenderHandle> handle_deleter(it->handle);
    it->handle = NULL;
    prerenders_.erase(it);
    return;
  }
  NOTREACHED();
}
// Marks the manager as shut down; StartPrerenders() early-returns afterwards,
// so no new prerenders are launched.
void PrerenderLinkManager::Shutdown() {
  has_shutdown_ = true;
}
// In practice this is reached from PrerenderLinkManager::OnAddPrerender in
// the regular case, or — for pending prerenders — from
// PrerenderHandle::AdoptPrerenderDataFrom. Forwards the start event to the
// launching renderer.
void PrerenderLinkManager::OnPrerenderStart(
    PrerenderHandle* prerender_handle) {
  LinkPrerender* link = FindByPrerenderHandle(prerender_handle);
  if (link) {
    Send(link->launcher_child_id,
         new PrerenderMsg_OnPrerenderStart(link->prerender_id));
  }
}
// Forwards the stop-loading event for |prerender_handle| to the renderer
// that launched it, if the handle is still tracked.
void PrerenderLinkManager::OnPrerenderStopLoading(
    PrerenderHandle* prerender_handle) {
  LinkPrerender* link = FindByPrerenderHandle(prerender_handle);
  if (link) {
    Send(link->launcher_child_id,
         new PrerenderMsg_OnPrerenderStopLoading(link->prerender_id));
  }
}
// Notifies the launcher that the prerender stopped, drops the entry, and —
// since a concurrency slot just freed up — tries to launch queued prerenders.
void PrerenderLinkManager::OnPrerenderStop(
    PrerenderHandle* prerender_handle) {
  LinkPrerender* link = FindByPrerenderHandle(prerender_handle);
  if (!link)
    return;
  const int child_id = link->launcher_child_id;
  const int stopped_id = link->prerender_id;
  Send(child_id, new PrerenderMsg_OnPrerenderStop(stopped_id));
  RemovePrerender(link);
  StartPrerenders();
}
// Forwards an alias URL added by the running prerender to the launching
// renderer, so it can match future navigations against |alias_url| too.
void PrerenderLinkManager::OnPrerenderAddAlias(
    PrerenderHandle* prerender_handle,
    const GURL& alias_url) {
  LinkPrerender* link = FindByPrerenderHandle(prerender_handle);
  if (!link)
    return;
  Send(link->launcher_child_id,
       new PrerenderMsg_OnPrerenderAddAlias(link->prerender_id, alias_url));
}
} // namespace prerender
| {
"content_hash": "37f83c001ca160bc61f24a8674c2b599",
"timestamp": "",
"source": "github",
"line_count": 331,
"max_line_length": 80,
"avg_line_length": 33.329305135951664,
"alnum_prop": 0.6501087744742567,
"repo_name": "zcbenz/cefode-chromium",
"id": "d27a81e9264d230db4a31b1ea9a4d37685b70009",
"size": "11949",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chrome/browser/prerender/prerender_link_manager.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "1174304"
},
{
"name": "Awk",
"bytes": "9519"
},
{
"name": "C",
"bytes": "76026099"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "157904700"
},
{
"name": "DOT",
"bytes": "1559"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Java",
"bytes": "3225038"
},
{
"name": "JavaScript",
"bytes": "18180217"
},
{
"name": "Logos",
"bytes": "4517"
},
{
"name": "Matlab",
"bytes": "5234"
},
{
"name": "Objective-C",
"bytes": "7139426"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "932901"
},
{
"name": "Python",
"bytes": "8654916"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3621"
},
{
"name": "Shell",
"bytes": "1533012"
},
{
"name": "Tcl",
"bytes": "277077"
},
{
"name": "XML",
"bytes": "13493"
}
],
"symlink_target": ""
} |
<component name="libraryTable">
<library name="Maven: egovframework.rte:egovframework.rte.fdl.logging:3.7.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/egovframework/rte/egovframework.rte.fdl.logging/3.7.0/egovframework.rte.fdl.logging-3.7.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/egovframework/rte/egovframework.rte.fdl.logging/3.7.0/egovframework.rte.fdl.logging-3.7.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/egovframework/rte/egovframework.rte.fdl.logging/3.7.0/egovframework.rte.fdl.logging-3.7.0-sources.jar!/" />
</SOURCES>
</library>
</component> | {
"content_hash": "872feb0231d523a963d504c494e60c5d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 149,
"avg_line_length": 51.23076923076923,
"alnum_prop": 0.6966966966966966,
"repo_name": "dasomel/egovframework",
"id": "4ce46278d78dc4578acb4386f86e25ea3a09a19c",
"size": "666",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".idea/libraries/Maven__egovframework_rte_egovframework_rte_fdl_logging_3_7_0.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "7c0cde65bc661ee00993fa6924a35af3",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "3d578693a4c54f4ffc3aad0d4453fd470d9254a3",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Gesneriaceae/Paradrymonia/Paradrymonia conferta/ Syn. Centrosolenia conferta/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.19: https://docutils.sourceforge.io/" />
<meta name="viewport" content="width=device-width,initial-scale=1">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<meta name="lang:clipboard.copy" content="Copy to clipboard">
<meta name="lang:clipboard.copied" content="Copied to clipboard">
<meta name="lang:search.language" content="en">
<meta name="lang:search.pipeline.stopwords" content="True">
<meta name="lang:search.pipeline.trimmer" content="True">
<meta name="lang:search.result.none" content="No matching documents">
<meta name="lang:search.result.one" content="1 matching document">
<meta name="lang:search.result.other" content="# matching documents">
<meta name="lang:search.tokenizer" content="[\s\-]+">
<link href="https://fonts.gstatic.com/" rel="preconnect" crossorigin>
<link href="https://fonts.googleapis.com/css?family=Roboto+Mono:400,500,700|Roboto:300,400,400i,700&display=fallback" rel="stylesheet">
<style>
body,
input {
font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif
}
code,
kbd,
pre {
font-family: "Roboto Mono", "Courier New", Courier, monospace
}
</style>
<link rel="stylesheet" href="../_static/stylesheets/application.css"/>
<link rel="stylesheet" href="../_static/stylesheets/application-palette.css"/>
<link rel="stylesheet" href="../_static/stylesheets/application-fixes.css"/>
<link rel="stylesheet" href="../_static/fonts/material-icons.css"/>
<meta name="theme-color" content="#3f51b5">
<script src="../_static/javascripts/modernizr.js"></script>
<title>statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu — statsmodels</title>
<link rel="icon" type="image/png" sizes="32x32" href="../_static/icons/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="../_static/icons/favicon-16x16.png">
<link rel="manifest" href="../_static/icons/site.webmanifest">
<link rel="mask-icon" href="../_static/icons/safari-pinned-tab.svg" color="#919191">
<meta name="msapplication-TileColor" content="#2b5797">
<meta name="msapplication-config" content="../_static/icons/browserconfig.xml">
<link rel="stylesheet" href="../_static/stylesheets/examples.css">
<link rel="stylesheet" href="../_static/stylesheets/deprecation.css">
<link rel="stylesheet" type="text/css" href="../_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="../_static/material.css" />
<link rel="stylesheet" type="text/css" href="../_static/graphviz.css" />
<link rel="stylesheet" type="text/css" href="../_static/plot_directive.css" />
<script data-url_root="../" id="documentation_options" src="../_static/documentation_options.js"></script>
<script src="../_static/jquery.js"></script>
<script src="../_static/underscore.js"></script>
<script src="../_static/_sphinx_javascript_frameworks_compat.js"></script>
<script src="../_static/doctools.js"></script>
<script src="../_static/sphinx_highlight.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<link rel="shortcut icon" href="../_static/favicon.ico"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
<link rel="search" title="Search" href="../search.html" />
<link rel="next" title="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.mean_deriv" href="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.mean_deriv.html" />
<link rel="prev" title="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike" href="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike.html" />
</head>
<body dir=ltr
data-md-color-primary=indigo data-md-color-accent=blue>
<svg class="md-svg">
<defs data-children-count="0">
<svg xmlns="http://www.w3.org/2000/svg" width="416" height="448" viewBox="0 0 416 448" id="__github"><path fill="currentColor" d="M160 304q0 10-3.125 20.5t-10.75 19T128 352t-18.125-8.5-10.75-19T96 304t3.125-20.5 10.75-19T128 256t18.125 8.5 10.75 19T160 304zm160 0q0 10-3.125 20.5t-10.75 19T288 352t-18.125-8.5-10.75-19T256 304t3.125-20.5 10.75-19T288 256t18.125 8.5 10.75 19T320 304zm40 0q0-30-17.25-51T296 232q-10.25 0-48.75 5.25Q229.5 240 208 240t-39.25-2.75Q130.75 232 120 232q-29.5 0-46.75 21T56 304q0 22 8 38.375t20.25 25.75 30.5 15 35 7.375 37.25 1.75h42q20.5 0 37.25-1.75t35-7.375 30.5-15 20.25-25.75T360 304zm56-44q0 51.75-15.25 82.75-9.5 19.25-26.375 33.25t-35.25 21.5-42.5 11.875-42.875 5.5T212 416q-19.5 0-35.5-.75t-36.875-3.125-38.125-7.5-34.25-12.875T37 371.5t-21.5-28.75Q0 312 0 260q0-59.25 34-99-6.75-20.5-6.75-42.5 0-29 12.75-54.5 27 0 47.5 9.875t47.25 30.875Q171.5 96 212 96q37 0 70 8 26.25-20.5 46.75-30.25T376 64q12.75 25.5 12.75 54.5 0 21.75-6.75 42 34 40 34 99.5z"/></svg>
</defs>
</svg>
<input class="md-toggle" data-md-toggle="drawer" type="checkbox" id="__drawer">
<input class="md-toggle" data-md-toggle="search" type="checkbox" id="__search">
<label class="md-overlay" data-md-component="overlay" for="__drawer"></label>
<a href="#generated/statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu" tabindex="1" class="md-skip"> Skip to content </a>
<header class="md-header" data-md-component="header">
<nav class="md-header-nav md-grid">
<div class="md-flex navheader">
<div class="md-flex__cell md-flex__cell--shrink">
<a href="../index.html" title="statsmodels"
class="md-header-nav__button md-logo">
<img src="../_static/statsmodels-logo-v2-bw.svg" height="26"
alt="statsmodels logo">
</a>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--menu md-header-nav__button" for="__drawer"></label>
</div>
<div class="md-flex__cell md-flex__cell--stretch">
<div class="md-flex__ellipsis md-header-nav__title" data-md-component="title">
<span class="md-header-nav__topic">statsmodels 0.14.0 (+596)</span>
<span class="md-header-nav__topic"> statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu </span>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--search md-header-nav__button" for="__search"></label>
<div class="md-search" data-md-component="search" role="dialog">
<label class="md-search__overlay" for="__search"></label>
<div class="md-search__inner" role="search">
<form class="md-search__form" action="../search.html" method="get" name="search">
<input type="text" class="md-search__input" name="q" placeholder="Search"
autocapitalize="off" autocomplete="off" spellcheck="false"
data-md-component="query" data-md-state="active">
<label class="md-icon md-search__icon" for="__search"></label>
<button type="reset" class="md-icon md-search__icon" data-md-component="reset" tabindex="-1">

</button>
</form>
<div class="md-search__output">
<div class="md-search__scrollwrap" data-md-scrollfix>
<div class="md-search-result" data-md-component="result">
<div class="md-search-result__meta">
Type to start searching
</div>
<ol class="md-search-result__list"></ol>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<div class="md-header-nav__source">
<a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28">
<use xlink:href="#__github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
statsmodels
</div>
</a>
</div>
</div>
<script src="../_static/javascripts/version_dropdown.js"></script>
<script>
var json_loc = "../../versions-v2.json",
target_loc = "../../",
text = "Versions";
$( document ).ready( add_version_dropdown(json_loc, target_loc, text));
</script>
</div>
</nav>
</header>
<div class="md-container">
<nav class="md-tabs" data-md-component="tabs">
<div class="md-tabs__inner md-grid">
<ul class="md-tabs__list">
<li class="md-tabs__item"><a href="../user-guide.html" class="md-tabs__link">User Guide</a></li>
<li class="md-tabs__item"><a href="../gee.html" class="md-tabs__link">Generalized Estimating Equations</a></li>
<li class="md-tabs__item"><a href="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.html" class="md-tabs__link">statsmodels.genmod.generalized_estimating_equations.OrdinalGEE</a></li>
</ul>
</div>
</nav>
<main class="md-main">
<div class="md-main__inner md-grid" data-md-component="container">
<div class="md-sidebar md-sidebar--primary" data-md-component="navigation">
<div class="md-sidebar__scrollwrap">
<div class="md-sidebar__inner">
<nav class="md-nav md-nav--primary" data-md-level="0">
<label class="md-nav__title md-nav__title--site" for="__drawer">
<a href="../index.html" title="statsmodels" class="md-nav__button md-logo">
<img src="../_static/statsmodels-logo-v2-bw.svg" alt=" logo" width="48" height="48">
</a>
<a href="../index.html"
title="statsmodels">statsmodels 0.14.0 (+596)</a>
</label>
<div class="md-nav__source">
<a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28">
<use xlink:href="#__github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
statsmodels
</div>
</a>
</div>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../install.html" class="md-nav__link">Installing statsmodels</a>
</li>
<li class="md-nav__item">
<a href="../gettingstarted.html" class="md-nav__link">Getting started</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html" class="md-nav__link">User Guide</a>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../user-guide.html#background" class="md-nav__link">Background</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#regression-and-linear-models" class="md-nav__link">Regression and Linear Models</a>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../regression.html" class="md-nav__link">Linear Regression</a>
</li>
<li class="md-nav__item">
<a href="../glm.html" class="md-nav__link">Generalized Linear Models</a>
</li>
<li class="md-nav__item">
<a href="../gee.html" class="md-nav__link">Generalized Estimating Equations</a>
</li>
<li class="md-nav__item">
<a href="../gam.html" class="md-nav__link">Generalized Additive Models (GAM)</a>
</li>
<li class="md-nav__item">
<a href="../rlm.html" class="md-nav__link">Robust Linear Models</a>
</li>
<li class="md-nav__item">
<a href="../mixed_linear.html" class="md-nav__link">Linear Mixed Effects Models</a>
</li>
<li class="md-nav__item">
<a href="../discretemod.html" class="md-nav__link">Regression with Discrete Dependent Variable</a>
</li>
<li class="md-nav__item">
<a href="../mixed_glm.html" class="md-nav__link">Generalized Linear Mixed Effects Models</a>
</li>
<li class="md-nav__item">
<a href="../anova.html" class="md-nav__link">ANOVA</a>
</li>
<li class="md-nav__item">
<a href="../other_models.html" class="md-nav__link">Other Models <code class="xref py py-mod docutils literal notranslate"><span class="pre">othermod</span></code></a>
</li></ul>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#time-series-analysis" class="md-nav__link">Time Series Analysis</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#other-models" class="md-nav__link">Other Models</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#statistics-and-tools" class="md-nav__link">Statistics and Tools</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#data-sets" class="md-nav__link">Data Sets</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#sandbox" class="md-nav__link">Sandbox</a>
</li></ul>
</li>
<li class="md-nav__item">
<a href="../examples/index.html" class="md-nav__link">Examples</a>
</li>
<li class="md-nav__item">
<a href="../api.html" class="md-nav__link">API Reference</a>
</li>
<li class="md-nav__item">
<a href="../about.html" class="md-nav__link">About statsmodels</a>
</li>
<li class="md-nav__item">
<a href="../dev/index.html" class="md-nav__link">Developer Page</a>
</li>
<li class="md-nav__item">
<a href="../release/index.html" class="md-nav__link">Release Notes</a>
</li>
</ul>
</nav>
</div>
</div>
</div>
<div class="md-sidebar md-sidebar--secondary" data-md-component="toc">
<div class="md-sidebar__scrollwrap">
<div class="md-sidebar__inner">
<nav class="md-nav md-nav--secondary">
<label class="md-nav__title" for="__toc">Contents</label>
<ul class="md-nav__list" data-md-scrollfix="">
<li class="md-nav__item"><a href="#generated-statsmodels-genmod-generalized-estimating-equations-ordinalgee-loglike-mu--page-root" class="md-nav__link">statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu</a><nav class="md-nav">
<ul class="md-nav__list">
<li class="md-nav__item"><a href="#statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu" class="md-nav__link"><code class="docutils literal notranslate"><span class="pre">OrdinalGEE.loglike_mu</span></code></a>
</li></ul>
</nav>
</li>
<li class="md-nav__item"><a class="md-nav__extra_link" href="../_sources/generated/statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu.rst.txt">Show Source</a> </li>
<li id="searchbox" class="md-nav__item"></li>
</ul>
</nav>
</div>
</div>
</div>
<div class="md-content">
<article class="md-content__inner md-typeset" role="main">
<section id="statsmodels-genmod-generalized-estimating-equations-ordinalgee-loglike-mu">
<h1 id="generated-statsmodels-genmod-generalized-estimating-equations-ordinalgee-loglike-mu--page-root">statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu<a class="headerlink" href="#generated-statsmodels-genmod-generalized-estimating-equations-ordinalgee-loglike-mu--page-root" title="Permalink to this heading">¶</a></h1>
<dl class="py method">
<dt class="sig sig-object py" id="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu">
<span class="sig-prename descclassname"><span class="pre">OrdinalGEE.</span></span><span class="sig-name descname"><span class="pre">loglike_mu</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">mu</span></span></em>, <em class="sig-param"><span class="n"><span class="pre">scale</span></span><span class="o"><span class="pre">=</span></span><span class="default_value"><span class="pre">1.0</span></span></em><span class="sig-paren">)</span><a class="headerlink" href="#statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu" title="Permalink to this definition">¶</a></dt>
<dd><p>Evaluate the log-likelihood for a generalized linear model.</p>
</dd></dl>
</section>
</article>
</div>
</div>
</main>
</div>
<footer class="md-footer">
<div class="md-footer-nav">
<nav class="md-footer-nav__inner md-grid">
<a href="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike.html" title="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike"
class="md-flex md-footer-nav__link md-footer-nav__link--prev"
rel="prev">
<div class="md-flex__cell md-flex__cell--shrink">
<i class="md-icon md-icon--arrow-back md-footer-nav__button"></i>
</div>
<div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title">
<span class="md-flex__ellipsis">
<span
class="md-footer-nav__direction"> Previous </span> statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike </span>
</div>
</a>
<a href="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.mean_deriv.html" title="statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.mean_deriv"
class="md-flex md-footer-nav__link md-footer-nav__link--next"
rel="next">
<div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title"><span
class="md-flex__ellipsis"> <span
class="md-footer-nav__direction"> Next </span> statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.mean_deriv </span>
</div>
<div class="md-flex__cell md-flex__cell--shrink"><i
class="md-icon md-icon--arrow-forward md-footer-nav__button"></i>
</div>
</a>
</nav>
</div>
<div class="md-footer-meta md-typeset">
<div class="md-footer-meta__inner md-grid">
<div class="md-footer-copyright">
<div class="md-footer-copyright__highlight">
© Copyright 2009-2019, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers.
</div>
Last updated on
Nov 11, 2022.
<br/>
Created using
<a href="http://www.sphinx-doc.org/">Sphinx</a> 5.3.0.
and
<a href="https://github.com/bashtage/sphinx-material/">Material for
Sphinx</a>
</div>
</div>
</div>
</footer>
<script src="../_static/javascripts/application.js"></script>
<script>app.initialize({version: "1.0.4", url: {base: ".."}})</script>
</body>
</html> | {
"content_hash": "bf16da5fd072939f6e7f7b4fd3d59c91",
"timestamp": "",
"source": "github",
"line_count": 506,
"max_line_length": 999,
"avg_line_length": 39.41106719367589,
"alnum_prop": 0.605355531040016,
"repo_name": "statsmodels/statsmodels.github.io",
"id": "f89b47db4529d993f80133836224a208d25e07dc",
"size": "19946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "devel/generated/statsmodels.genmod.generalized_estimating_equations.OrdinalGEE.loglike_mu.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
<h2>{{greeting}} My First Angular2 nested component</h2>
| {
"content_hash": "2c05003acc6cac279e34ecd6ff03b6aa",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 56,
"avg_line_length": 57,
"alnum_prop": 0.7368421052631579,
"repo_name": "jmc420/examples",
"id": "c12cf361681b76b2cd24beac64ff6f5fb63ce29f",
"size": "57",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "angular2-typescript-systemjs/src/html/component.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "164"
},
{
"name": "HTML",
"bytes": "1498"
},
{
"name": "JavaScript",
"bytes": "1425515"
},
{
"name": "TypeScript",
"bytes": "6637"
}
],
"symlink_target": ""
} |
The examples in this section show how to use the AWS SDK for .NET with Amazon Relational Database Service (Amazon RDS).
Amazon RDS is a web service that makes it easier to set up, operate, and scale a relational database in the cloud.
## ⚠️ Important
* Running this code might result in charges to your AWS account.
* Running the tests might result in charges to your AWS account.
* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege).
* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services).
## Code examples
### Single actions
Code excerpts that show you how to call individual service functions.
* [Create a DB parameter group](Actions/RDSWrapper.ParameterGroups.cs)(`CreateDbParameterGroupAsync`)
* [Create a snapshot of a DB instance](Actions/RDSWrapper.Snaspshots.cs)(`CreateDbSnapshotAsync`)
* [Create a DB instance](Actions/RDSWrapper.Instances.cs)(`CreateDBInstanceAsync`)
* [Delete a DB instance](Actions/RDSWrapper.Instances.cs)(`DeleteDBInstanceAsync`)
* [Delete a DB parameter group](Actions/RDSWrapper.ParameterGroups.cs)(`DeleteDbParameterGroupAsync`)
* [Describe DB instances](Actions/RDSWrapper.Instances.cs)(`DescribeDBInstancesAsync`)
* [Describe DB parameter groups](Actions/RDSWrapper.ParameterGroups.cs)(`DescribeDbParameterGroupsAsync`)
* [Describe database engine versions](Actions/RDSWrapper.Instances.cs)(`DescribeDbEngineVersionsAsync`)
* [Describe options for DB instances](Actions/RDSWrapper.Instances.cs)(`DescribeOrderableDbInstanceOptionsAsync`)
* [Describe parameters in a DB parameter group](Actions/RDSWrapper.ParameterGroups.cs)(`DescribeDbParametersAsync`)
* [Describe snapshots of DB instances](Actions/RDSWrapper.Snaspshots.cs)(`DescribeDbSnapshotsAsync`)
* [Update parameters in a DB parameter group](Actions/RDSWrapper.ParameterGroups.cs)(`ModifyDbParameterGroupAsync`)
### Scenarios
Code examples that show you how to accomplish a specific task by calling
multiple functions within the same service.
* [Get started with DB instances](Scenarios/RDSInstanceScenario/RDSInstanceScenario.cs)
## Run the examples
### Prerequisites
* To find prerequisites for running these examples, see the
[README](../README.md#Prerequisites) in the dotnetv3 folder.
After the example compiles, you can run it from the command line. To do so,
navigate to the folder that contains the .csproj file and run the following
command:
```
dotnet run
```
Alternatively, you can run the example from within your IDE.
## Tests
⚠️ Running the tests might result in charges to your AWS account.
The solution includes a test project. To run the tests, navigate to the folder that contains the test project and then issue the following command:
```
dotnet test
```
Alternatively, you can open the example solution and use the Visual Studio Test Runner to run the tests.
## Additional resources
* [Amazon RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Welcome.html)
* [Amazon RDS API Reference](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/Welcome.html)
* [AWS SDK for .NET Amazon RDS](https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/RDS/NRDS.html)
* [AWS SDK for .NET Developer Guide](https://docs.aws.amazon.com/sdk-for-net/v3/developer-guide/welcome.html)
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: Apache-2.0
| {
"content_hash": "86ee298b0732f24bc3247688e2c3a0f7",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 273,
"avg_line_length": 53.2463768115942,
"alnum_prop": 0.7917800762112139,
"repo_name": "awsdocs/aws-doc-sdk-examples",
"id": "f1933af21625e8ae06b7aef87a3d914200301844",
"size": "3743",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "dotnetv3/RDS/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "476653"
},
{
"name": "Batchfile",
"bytes": "900"
},
{
"name": "C",
"bytes": "3852"
},
{
"name": "C#",
"bytes": "2051923"
},
{
"name": "C++",
"bytes": "943634"
},
{
"name": "CMake",
"bytes": "82068"
},
{
"name": "CSS",
"bytes": "33378"
},
{
"name": "Dockerfile",
"bytes": "2243"
},
{
"name": "Go",
"bytes": "1764292"
},
{
"name": "HTML",
"bytes": "319090"
},
{
"name": "Java",
"bytes": "4966853"
},
{
"name": "JavaScript",
"bytes": "1655476"
},
{
"name": "Jupyter Notebook",
"bytes": "9749"
},
{
"name": "Kotlin",
"bytes": "1099902"
},
{
"name": "Makefile",
"bytes": "4922"
},
{
"name": "PHP",
"bytes": "1220594"
},
{
"name": "Python",
"bytes": "2507509"
},
{
"name": "Ruby",
"bytes": "500331"
},
{
"name": "Rust",
"bytes": "558811"
},
{
"name": "Shell",
"bytes": "63776"
},
{
"name": "Swift",
"bytes": "267325"
},
{
"name": "TypeScript",
"bytes": "119632"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_45) on Thu Mar 26 16:48:38 UTC 2015 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
Uses of Package com.hazelcast.hibernate (Hazelcast Root 3.4.2 API)
</TITLE>
<META NAME="date" CONTENT="2015-03-26">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package com.hazelcast.hibernate (Hazelcast Root 3.4.2 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../index.html?com/hazelcast/hibernate/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Package<br>com.hazelcast.hibernate</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../com/hazelcast/hibernate/package-summary.html">com.hazelcast.hibernate</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.hazelcast.hibernate"><B>com.hazelcast.hibernate</B></A></TD>
<TD>Contains interfaces/classes related to Hibernate. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.hazelcast.hibernate.access"><B>com.hazelcast.hibernate.access</B></A></TD>
<TD>Provides access interfaces/classes for Hibernate. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.hazelcast.hibernate.distributed"><B>com.hazelcast.hibernate.distributed</B></A></TD>
<TD>Provides distributed class for Hibernate. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.hazelcast.hibernate.local"><B>com.hazelcast.hibernate.local</B></A></TD>
<TD>Provides local classes for Hibernate. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.hazelcast.hibernate.region"><B>com.hazelcast.hibernate.region</B></A></TD>
<TD>Provides region interfaces/classes for Hibernate. </TD>
</TR>
</TABLE>
<P>
<A NAME="com.hazelcast.hibernate"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../com/hazelcast/hibernate/package-summary.html">com.hazelcast.hibernate</A> used by <A HREF="../../../com/hazelcast/hibernate/package-summary.html">com.hazelcast.hibernate</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../com/hazelcast/hibernate/class-use/AbstractHazelcastCacheRegionFactory.html#com.hazelcast.hibernate"><B>AbstractHazelcastCacheRegionFactory</B></A></B>
<BR>
Abstract superclass of Hazelcast based <CODE>RegionFactory</CODE> implementations</TD>
</TR>
</TABLE>
<P>
<A NAME="com.hazelcast.hibernate.access"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../com/hazelcast/hibernate/package-summary.html">com.hazelcast.hibernate</A> used by <A HREF="../../../com/hazelcast/hibernate/access/package-summary.html">com.hazelcast.hibernate.access</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../com/hazelcast/hibernate/class-use/RegionCache.html#com.hazelcast.hibernate.access"><B>RegionCache</B></A></B>
<BR>
This interface defines an internal cached region implementation as well as a mechanism
to unmap the cache to an underlying Map data-structure</TD>
</TR>
</TABLE>
<P>
<A NAME="com.hazelcast.hibernate.distributed"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../com/hazelcast/hibernate/package-summary.html">com.hazelcast.hibernate</A> used by <A HREF="../../../com/hazelcast/hibernate/distributed/package-summary.html">com.hazelcast.hibernate.distributed</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../com/hazelcast/hibernate/class-use/RegionCache.html#com.hazelcast.hibernate.distributed"><B>RegionCache</B></A></B>
<BR>
This interface defines an internal cached region implementation as well as a mechanism
to unmap the cache to an underlying Map data-structure</TD>
</TR>
</TABLE>
<P>
<A NAME="com.hazelcast.hibernate.local"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../com/hazelcast/hibernate/package-summary.html">com.hazelcast.hibernate</A> used by <A HREF="../../../com/hazelcast/hibernate/local/package-summary.html">com.hazelcast.hibernate.local</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../com/hazelcast/hibernate/class-use/RegionCache.html#com.hazelcast.hibernate.local"><B>RegionCache</B></A></B>
<BR>
This interface defines an internal cached region implementation as well as a mechanism
to unmap the cache to an underlying Map data-structure</TD>
</TR>
</TABLE>
<P>
<A NAME="com.hazelcast.hibernate.region"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../com/hazelcast/hibernate/package-summary.html">com.hazelcast.hibernate</A> used by <A HREF="../../../com/hazelcast/hibernate/region/package-summary.html">com.hazelcast.hibernate.region</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../com/hazelcast/hibernate/class-use/RegionCache.html#com.hazelcast.hibernate.region"><B>RegionCache</B></A></B>
<BR>
This interface defines an internal cached region implementation as well as a mechanism
to unmap the cache to an underlying Map data-structure</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../index.html?com/hazelcast/hibernate/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2015 <a href="http://www.hazelcast.com/">Hazelcast, Inc.</a>. All Rights Reserved.
</BODY>
</HTML>
| {
"content_hash": "74a46799a712f1648b116c45e8c93d03",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 238,
"avg_line_length": 45.191235059760956,
"alnum_prop": 0.6652561050868377,
"repo_name": "akiskip/KoDeMat-Collaboration-Platform-Application",
"id": "57ad0efd832402d976155db06b2c66eccb1cdb7d",
"size": "11343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "KoDeMat_TouchScreen/lib/hazelcast-3.4.2/hazelcast-3.4.2/docs/javadoc/com/hazelcast/hibernate/package-use.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1255"
},
{
"name": "CSS",
"bytes": "12362"
},
{
"name": "GLSL",
"bytes": "105719"
},
{
"name": "HTML",
"bytes": "17271482"
},
{
"name": "Java",
"bytes": "1388877"
},
{
"name": "JavaScript",
"bytes": "110983"
},
{
"name": "Shell",
"bytes": "1365"
}
],
"symlink_target": ""
} |
import logging
import smtplib
from email.mime.text import MIMEText
from ConfigParser import SafeConfigParser
def sendEmail(subject, text):
    """Send a plain-text notification email via Gmail's SMTP server.

    Credentials and addresses are read from the ``r_cricket_bot`` section
    of ``config.ini`` (``email_id``, ``email_pass``, ``to_email_id``).
    Failures are logged (with traceback) rather than raised.

    :param subject: subject line for the message
    :param text: plain-text body of the message
    """
    parser = SafeConfigParser()
    parser.read('config.ini')
    from_user = parser.get('r_cricket_bot', 'email_id')
    from_pass = parser.get('r_cricket_bot', 'email_pass')
    to_user = [parser.get('r_cricket_bot', 'to_email_id')]
    # Build a proper MIME message instead of hand-assembling headers; the
    # previous string template produced a malformed leading "\From:" header
    # and MIMEText was imported but never used.
    message = MIMEText(text)
    message['From'] = from_user
    message['To'] = ', '.join(to_user)
    message['Subject'] = subject
    try:
        s = smtplib.SMTP('smtp.gmail.com', 587)
        s.ehlo()
        s.starttls()
        s.ehlo()  # re-identify over the now-encrypted channel
        s.login(from_user, from_pass)
        s.sendmail(from_user, to_user, message.as_string())
        s.quit()
    except (smtplib.SMTPException, IOError):
        # Catch only SMTP/socket failures and keep the traceback; the old
        # bare ``except:`` silently swallowed everything, even
        # KeyboardInterrupt.
        logging.exception('Cannot send email.')
| {
"content_hash": "cb4d4bef8a0cd79fbe7243780296ef15",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 104,
"avg_line_length": 30.565217391304348,
"alnum_prop": 0.6614509246088194,
"repo_name": "rreyv/rcb",
"id": "608382abddf9afe80dade8f740f0a9ea49671e2d",
"size": "703",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "emails.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25013"
}
],
"symlink_target": ""
} |
OTA custom server for the MIUI updater.
| {
"content_hash": "a92dd8a9a067cd606ffd0c9f3668a91b",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 34,
"avg_line_length": 35,
"alnum_prop": 0.8285714285714286,
"repo_name": "noBrainsDev/ota-server",
"id": "b39758671e4ccd76f6fb2789dd88fa172069b6b6",
"size": "72",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "146"
},
{
"name": "CSS",
"bytes": "38646"
},
{
"name": "JavaScript",
"bytes": "21915"
},
{
"name": "PHP",
"bytes": "177395"
}
],
"symlink_target": ""
} |
namespace content {
// Tracks UKM metrics for a single WebXR session: which features were
// requested, which were granted or rejected, and which were actually used.
class WebXRSessionTracker
    : public SessionTracker<ukm::builders::XR_WebXR_Session>,
      device::mojom::XRSessionMetricsRecorder {
 public:
  explicit WebXRSessionTracker(
      std::unique_ptr<ukm::builders::XR_WebXR_Session> entry);
  ~WebXRSessionTracker() override;
  // Records which features for the session have been requested as required or
  // optional, which were accepted/rejected, and which weren't requested at
  // all. This assumes that the session as a whole was accepted.
  void ReportRequestedFeatures(
      const device::mojom::XRSessionOptions& session_options,
      const std::set<device::mojom::XRSessionFeature>& enabled_features);
  // |XRSessionMetricsRecorder| implementation
  void ReportFeatureUsed(device::mojom::XRSessionFeature feature) override;
  // Binds this tracker's |XRSessionMetricsRecorder| receiver to a new pipe, and
  // returns the |PendingRemote|.
  mojo::PendingRemote<device::mojom::XRSessionMetricsRecorder>
  BindMetricsRecorderPipe();
 private:
  // Records |status| as the request outcome for |feature| on the UKM entry.
  void SetFeatureRequest(device::mojom::XRSessionFeature feature,
                         device::mojom::XRSessionFeatureRequestStatus status);
  // Receiver end of the pipe handed out by BindMetricsRecorderPipe().
  mojo::Receiver<device::mojom::XRSessionMetricsRecorder> receiver_;
};
} // namespace content
#endif // CONTENT_BROWSER_XR_METRICS_WEBXR_SESSION_TRACKER_H_
| {
"content_hash": "928d73790a23ccd06e85bc4e4b4a09a8",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 80,
"avg_line_length": 38.17142857142857,
"alnum_prop": 0.7514970059880239,
"repo_name": "endlessm/chromium-browser",
"id": "148762b5d808efcfb7e0874e9b43ca2a6f8d1de3",
"size": "1930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "content/browser/xr/metrics/webxr_session_tracker.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
<?php
use DTS\eBaySDK\Trading\Types\SkypeMeTransactionalEnabledDefinitionType;
/**
 * Unit tests for SkypeMeTransactionalEnabledDefinitionType.
 */
class SkypeMeTransactionalEnabledDefinitionTypeTest extends \PHPUnit_Framework_TestCase
{
    /** @var SkypeMeTransactionalEnabledDefinitionType instance under test */
    private $testType;

    protected function setUp()
    {
        $this->testType = new SkypeMeTransactionalEnabledDefinitionType();
    }

    public function testCanBeCreated()
    {
        $this->assertInstanceOf(
            '\DTS\eBaySDK\Trading\Types\SkypeMeTransactionalEnabledDefinitionType',
            $this->testType
        );
    }

    public function testExtendsBaseType()
    {
        $this->assertInstanceOf(
            '\DTS\eBaySDK\Types\BaseType',
            $this->testType
        );
    }
}
| {
"content_hash": "5415b398d3474e8be77d2c81c0397173",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 116,
"avg_line_length": 26.304347826086957,
"alnum_prop": 0.7206611570247934,
"repo_name": "spoilie/ebay-sdk-trading",
"id": "f8b22ec1dcfdd8b7db57c100c52fe021b47c42f7",
"size": "605",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/DTS/eBaySDK/Trading/Types/SkypeMeTransactionalEnabledDefinitionTypeTest.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1963"
},
{
"name": "PHP",
"bytes": "3778863"
}
],
"symlink_target": ""
} |
<?php
namespace SlmMail\Service;
use Zend\Http\Client as HttpClient;
use Zend\Http\Request as HttpRequest;
use Zend\Http\Response as HttpResponse;
use Zend\Mail\Address;
use Zend\Mail\Message;
/**
 * Mail service backed by the SendGrid Web API (v2): sends messages and
 * manages the bounce, spam-report and block lists.
 */
class SendGridService extends AbstractMailService
{
    /**
     * Base URL for all SendGrid Web API calls
     */
    const API_ENDPOINT = 'https://sendgrid.com/api';
    /**
     * SendGrid username (sent as api_user on every request)
     *
     * @var string
     */
    protected $username;
    /**
     * SendGrid API key (sent as api_key on every request)
     *
     * @var string
     */
    protected $apiKey;
    /**
     * @param string $username SendGrid account username
     * @param string $apiKey   SendGrid API key
     */
    public function __construct($username, $apiKey)
    {
        $this->username = (string) $username;
        $this->apiKey   = (string) $apiKey;
    }
    /**
     * ------------------------------------------------------------------------------------------
     * MESSAGES
     * ------------------------------------------------------------------------------------------
     */
    /**
     * {@inheritDoc}
     * @link http://sendgrid.com/docs/API_Reference/Web_API/mail.html
     * @throws Exception\RuntimeException if the message has no single From
     *         sender, has CC addresses, or more than one Reply-To address
     *         (none of which SendGrid supports)
     * @return mixed
     */
    public function send(Message $message)
    {
        $from = $message->getFrom();
        if (count($from) !== 1) {
            throw new Exception\RuntimeException(
                'SendGrid API requires exactly one from sender'
            );
        }
        if (count($message->getCc())) {
            throw new Exception\RuntimeException('SendGrid does not support CC addresses');
        }
        $parameters = array(
            'from'     => $from->rewind()->getEmail(),
            'fromname' => $from->rewind()->getName(),
            'subject'  => $message->getSubject(),
            'text'     => $this->extractText($message),
            'html'     => $this->extractHtml($message)
        );
        foreach ($message->getTo() as $address) {
            $parameters['to'][] = $address->getEmail();
        }
        foreach ($message->getBcc() as $address) {
            $parameters['bcc'][] = $address->getEmail();
        }
        $replyTo = $message->getReplyTo();
        if (count($replyTo) > 1) {
            throw new Exception\RuntimeException('SendGrid has only support for one Reply-To address');
        } elseif (count($replyTo)) {
            $parameters['replyto'] = $replyTo->rewind()->getEmail();
        }
        $client = $this->prepareHttpClient('/mail.send.json');
        // Set parameters as POST, since prepareHttpClient() only sets GET parameters
        $client->setParameterPost($parameters);
        // Eventually add files. This cannot be done before prepareHttpClient call because prepareHttpClient
        // reset all parameters (response, request...), therefore we would loose the file upload
        $post        = $client->getRequest()->getPost();
        $attachments = $this->extractAttachments($message);
        foreach ($attachments as $attachment) {
            $post->set('files[' . $attachment->filename . ']', $attachment->getRawContent());
        }
        $response = $client->setMethod(HttpRequest::METHOD_POST)
                           ->setEncType(HttpClient::ENC_FORMDATA)
                           ->send();
        return $this->parseResponse($response);
    }
    /**
     * Retrieve usage statistics, optionally bounded by dates or aggregated
     * over all time.
     *
     * @param  int    $date      must be 1 if you want to retrieve dates
     * @param  string $startDate if specified, must be in YYYY-MM-DD format and < $endDate
     * @param  string $endDate   if specified, must be in YYYY-MM-DD format and > $startDate
     * @param  bool   $aggregate true if you are interested in all-time totals
     * @return array
     */
    public function getStatistics($date = 1, $startDate = '', $endDate = '', $aggregate = false)
    {
        $parameters = array('date' => $date, 'start_date' => $startDate, 'end_date' => $endDate, 'aggregate' => (int)$aggregate);
        $response = $this->prepareHttpClient('/stats.get.json', $parameters)
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * ------------------------------------------------------------------------------------------
     * BOUNCES
     * ------------------------------------------------------------------------------------------
     */
    /**
     * Get a list of bounces with addresses and response codes, optionally with dates
     *
     * @param int $date must be 1 if you want to retrieve dates
     * @param int $days if specified, must be superior to 0
     * @param string $startDate if specified, must be in YYYY-MM-DD format and < $endDate
     * @param string $endDate if specified, must be in YYYY-MM-DD format and > $startDate
     * @param string $email optional email to search for
     * @param int $limit optional field to limit the number of returned results
     * @param int $offset optional beginning point to retrieve results
     * @return array
     */
    public function getBounces($date = 1, $days = 1, $startDate = '', $endDate = '', $email = '', $limit = 100, $offset = 0)
    {
        $parameters = array('date' => $date, 'days' => $days, 'start_date' => $startDate, 'end_date' => $endDate,
                            'email' => $email, 'limit' => $limit, 'offset' => $offset);
        $response = $this->prepareHttpClient('/bounces.get.json', $parameters)
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * Delete an address from the Bounce list. Note that if no parameters are specified the ENTIRE list will be deleted.
     *
     * @param string $startDate if specified, must be in YYYY-MM-DD format and < $endDate
     * @param string $endDate if specified, must be in YYYY-MM-DD format and > $startDate
     * @param string $email optional email to search for
     * @return array
     */
    public function deleteBounces($startDate = '', $endDate = '', $email = '')
    {
        $parameters = array('start_date' => $startDate, 'end_date' => $endDate, 'email' => $email);
        $response = $this->prepareHttpClient('/bounces.delete.json', $parameters)
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * Count bounces, optionally restricted to a date range.
     *
     * @param string $startDate if specified, must be in YYYY-MM-DD format and < $endDate
     * @param string $endDate if specified, must be in YYYY-MM-DD format and > $startDate
     * @return array
     */
    public function countBounces($startDate = '', $endDate = '')
    {
        $parameters = array('start_date' => $startDate, 'end_date' => $endDate);
        $response = $this->prepareHttpClient('/bounces.count.json', $parameters)
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * ------------------------------------------------------------------------------------------
     * SPAMS
     * ------------------------------------------------------------------------------------------
     */
    /**
     * Retrieve entries in the Spam Reports list
     *
     * @param int $date must be 1 if you want to retrieve dates
     * @param int $days if specified, must be superior to 0
     * @param string $startDate if specified, must be in YYYY-MM-DD format and < $endDate
     * @param string $endDate if specified, must be in YYYY-MM-DD format and > $startDate
     * @param string $email optional email to search for
     * @param int $limit optional field to limit the number of returned results
     * @param int $offset optional beginning point to retrieve results
     * @return array
     */
    public function getSpamReports($date = 1, $days = 1, $startDate = '', $endDate = '', $email = '', $limit = 100, $offset = 0)
    {
        $parameters = array('date' => $date, 'days' => $days, 'start_date' => $startDate, 'end_date' => $endDate,
                            'email' => $email, 'limit' => $limit, 'offset' => $offset);
        $response = $this->prepareHttpClient('/spamreports.get.json', $parameters)
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * Delete an address from the Spam Reports list
     *
     * @param string $email email to search for
     * @return array
     */
    public function deleteSpamReport($email = '')
    {
        $response = $this->prepareHttpClient('/spamreports.delete.json', array('email' => $email))
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * ------------------------------------------------------------------------------------------
     * BLOCKS
     * ------------------------------------------------------------------------------------------
     */
    /**
     * Get a list of blocks with addresses and response codes, optionally with dates
     *
     * @link http://sendgrid.com/docs/API_Reference/Web_API/blocks.html
     * @param int $date must be 1 if you want to retrieve dates
     * @param int $days if specified, must be superior to 0
     * @param string $startDate if specified, must be in YYYY-MM-DD format and < $endDate
     * @param string $endDate if specified, must be in YYYY-MM-DD format and > $startDate
     * @return array
     */
    public function getBlocks($date = 1, $days = 1, $startDate = '', $endDate = '')
    {
        $parameters = array(
            'date'       => $date,
            'days'       => $days,
            'start_date' => $startDate,
            'end_date'   => $endDate
        );
        $response = $this->prepareHttpClient('/blocks.get.json', $parameters)
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * Delete an address from the block list
     *
     * @link http://sendgrid.com/docs/API_Reference/Web_API/blocks.html
     * @param string $email
     * @return array
     */
    public function deleteBlock($email)
    {
        // NOTE(review): every other endpoint in this class uses a ".json"
        // suffix; this one posts to "/blocks.delete" — confirm against the
        // SendGrid API before changing.
        $response = $this->prepareHttpClient('/blocks.delete', array('email' => $email))
                         ->send();
        return $this->parseResponse($response);
    }
    /**
     * Build a GET request against the API with credentials merged in.
     * Note: resets any previously configured parameters on the shared client.
     *
     * @param  string $uri
     * @param  array  $parameters
     * @return \Zend\Http\Client
     */
    private function prepareHttpClient($uri, array $parameters = array())
    {
        $parameters = array_merge(array('api_user' => $this->username, 'api_key' => $this->apiKey), $parameters);
        // filterParameters() is inherited from AbstractMailService —
        // presumably strips empty values before the request (TODO confirm).
        return $this->getClient()
                    ->resetParameters()
                    ->setMethod(HttpRequest::METHOD_GET)
                    ->setUri(self::API_ENDPOINT . $uri)
                    ->setParameterGet($this->filterParameters($parameters));
    }
    /**
     * Decode a SendGrid JSON response, converting 4xx/5xx errors into
     * exceptions.
     *
     * @param  HttpResponse $response
     * @throws Exception\RuntimeException on any non-success HTTP status
     * @return array
     */
    private function parseResponse(HttpResponse $response)
    {
        $result = json_decode($response->getBody(), true);
        if ($response->isSuccess()) {
            return $result;
        }
        // There is a 4xx error
        if ($response->isClientError()) {
            if (isset($result['errors']) && is_array($result['errors'])) {
                $message = implode(', ', $result['errors']);
            } elseif (isset($result['error'])) {
                $message = $result['error'];
            } else {
                $message = 'Unknown error';
            }
            throw new Exception\RuntimeException(sprintf(
                'An error occured on SendGrid (http code %s), message: %s', $response->getStatusCode(), $message
            ));
        }
        // There is a 5xx error
        throw new Exception\RuntimeException('SendGrid server error, please try again');
    }
}
| {
"content_hash": "e40e090750cb67189463e00e7889faf5",
"timestamp": "",
"source": "github",
"line_count": 327,
"max_line_length": 129,
"avg_line_length": 36.29051987767584,
"alnum_prop": 0.5247324513356366,
"repo_name": "exclie/imedich",
"id": "31ad4c5f859ab4d6303d0d635244b782a75a97ed",
"size": "13702",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "vendor/slm/mail/src/SlmMail/Service/SendGridService.php",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1181584"
},
{
"name": "Go",
"bytes": "7075"
},
{
"name": "JavaScript",
"bytes": "2385957"
},
{
"name": "PHP",
"bytes": "470298"
},
{
"name": "Python",
"bytes": "5844"
},
{
"name": "Shell",
"bytes": "274"
}
],
"symlink_target": ""
} |
#ifndef UDMX_H
#define UDMX_H
#include <QStringList>
#include <QList>
#include "qlcioplugin.h"
class UDMXDevice;
// QLC+ I/O plugin driving uDMX USB-DMX interfaces. Output-only: all input
// methods below are empty stubs.
class UDMX : public QLCIOPlugin
{
    Q_OBJECT
    Q_INTERFACES(QLCIOPlugin)
#if QT_VERSION > QT_VERSION_CHECK(5, 0, 0)
    Q_PLUGIN_METADATA(IID QLCIOPlugin_iid)
#endif
    /*********************************************************************
     * Initialization
     *********************************************************************/
public:
    /** @reimp */
    virtual ~UDMX();
    /** @reimp */
    void init();
    /** @reimp */
    QString name();
    /** @reimp */
    int capabilities() const;
    /** @reimp */
    QString pluginInfo();
    /** @reimp No runtime parameters are supported by this plugin */
    void setParameter(QString name, QVariant &value)
    { Q_UNUSED(name); Q_UNUSED(value); }
    /*********************************************************************
     * Outputs
     *********************************************************************/
public:
    /** @reimp */
    void openOutput(quint32 output);
    /** @reimp */
    void closeOutput(quint32 output);
    /** @reimp */
    QStringList outputs();
    /** @reimp */
    QString outputInfo(quint32 output);
    /** @reimp */
    void writeUniverse(quint32 universe, quint32 output, const QByteArray& data);
private:
    /** Attempt to find all uDMX devices */
    void rescanDevices();
    /** Get a UDMXDevice entry by its usbdev struct */
    UDMXDevice* device(struct usb_device* usbdev);
private:
    /** List of available devices */
    QList <UDMXDevice*> m_devices;
    /*************************************************************************
     * Inputs
     *************************************************************************/
public:
    /** @reimp Input is not supported: no-op */
    void openInput(quint32 input) { Q_UNUSED(input); }
    /** @reimp Input is not supported: no-op */
    void closeInput(quint32 input) { Q_UNUSED(input); }
    /** @reimp Input is not supported: always empty */
    QStringList inputs() { return QStringList(); }
    /** @reimp Input is not supported: always empty */
    QString inputInfo(quint32 input) { Q_UNUSED(input); return QString(); }
    /** @reimp Input is not supported: no-op */
    void sendFeedBack(quint32 input, quint32 channel, uchar value, const QString& key)
    { Q_UNUSED(input); Q_UNUSED(channel); Q_UNUSED(value); Q_UNUSED(key); }
    /*********************************************************************
     * Configuration
     *********************************************************************/
public:
    /** @reimp */
    void configure();
    /** @reimp */
    bool canConfigure();
};
#endif
| {
"content_hash": "ae7ff0ea9a7da244e4f19157b525e626",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 86,
"avg_line_length": 23.914285714285715,
"alnum_prop": 0.4555953803265631,
"repo_name": "hveld/qlcplus",
"id": "47cf96c5b3b8303f03d9eaa090ebedeb7cede3d4",
"size": "3157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/udmx/src/udmx.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AppleScript",
"bytes": "3195"
},
{
"name": "Batchfile",
"bytes": "1307"
},
{
"name": "C",
"bytes": "207216"
},
{
"name": "C++",
"bytes": "4795065"
},
{
"name": "HTML",
"bytes": "19383"
},
{
"name": "JavaScript",
"bytes": "62058"
},
{
"name": "NSIS",
"bytes": "10477"
},
{
"name": "Objective-C",
"bytes": "17578"
},
{
"name": "QMake",
"bytes": "132150"
},
{
"name": "Ruby",
"bytes": "10928"
},
{
"name": "Shell",
"bytes": "21935"
}
],
"symlink_target": ""
} |
"""Provides an HTTP API for mobile_app."""
import uuid
from typing import Dict
from aiohttp.web import Response, Request
from homeassistant.auth.util import generate_secret
from homeassistant.components.cloud import (
async_create_cloudhook,
async_remote_ui_url,
CloudNotAvailable,
)
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import HTTP_CREATED, CONF_WEBHOOK_ID
from .const import (
ATTR_DEVICE_ID,
ATTR_SUPPORTS_ENCRYPTION,
CONF_CLOUDHOOK_URL,
CONF_REMOTE_UI_URL,
CONF_SECRET,
CONF_USER_ID,
DOMAIN,
REGISTRATION_SCHEMA,
)
from .helpers import supports_encryption
class RegistrationsView(HomeAssistantView):
    """A view that accepts registration requests."""

    # Route and registry name for this HTTP view.
    url = "/api/mobile_app/registrations"
    name = "api:mobile_app:register"

    @RequestDataValidator(REGISTRATION_SCHEMA)
    async def post(self, request: Request, data: Dict) -> Response:
        """Handle the POST request for registration.

        Creates a webhook (plus a cloudhook when a cloud subscription is
        active), starts a config-entry flow for the registration and returns
        the connection details the mobile app needs (201 Created).
        """
        hass = request.app["hass"]
        # Secret webhook ID the device will use to push data to this instance.
        webhook_id = generate_secret()
        if hass.components.cloud.async_active_subscription():
            # Active cloud subscription: also expose the webhook through a
            # cloudhook URL.
            data[CONF_CLOUDHOOK_URL] = await async_create_cloudhook(hass, webhook_id)
        # Random device identifier: UUID4 with the dashes stripped.
        data[ATTR_DEVICE_ID] = str(uuid.uuid4()).replace("-", "")
        data[CONF_WEBHOOK_ID] = webhook_id
        # Only hand out an encryption secret when both the client and this
        # installation support it.
        if data[ATTR_SUPPORTS_ENCRYPTION] and supports_encryption():
            # Deferred import — presumably so PyNaCl is only required when
            # encryption is actually used (TODO confirm).
            from nacl.secret import SecretBox

            data[CONF_SECRET] = generate_secret(SecretBox.KEY_SIZE)
        data[CONF_USER_ID] = request["hass_user"].id
        ctx = {"source": "registration"}
        # Create a config entry for this registration via the flow manager.
        await hass.async_create_task(
            hass.config_entries.flow.async_init(DOMAIN, context=ctx, data=data)
        )
        # The remote UI URL is only available when cloud remote access is up.
        remote_ui_url = None
        try:
            remote_ui_url = async_remote_ui_url(hass)
        except CloudNotAvailable:
            pass
        return self.json(
            {
                CONF_CLOUDHOOK_URL: data.get(CONF_CLOUDHOOK_URL),
                CONF_REMOTE_UI_URL: remote_ui_url,
                CONF_SECRET: data.get(CONF_SECRET),
                CONF_WEBHOOK_ID: data[CONF_WEBHOOK_ID],
            },
            status_code=HTTP_CREATED,
        )
| {
"content_hash": "8a9056dd8b6339c141f5f2306a33ada8",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 85,
"avg_line_length": 29.948051948051948,
"alnum_prop": 0.6504770164787511,
"repo_name": "fbradyirl/home-assistant",
"id": "67914ea70763967c9b3485760287602abc12b57c",
"size": "2306",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/mobile_app/http_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1829"
},
{
"name": "Python",
"bytes": "16494727"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17784"
}
],
"symlink_target": ""
} |
<?php
/**
* Lexes a template string.
*
* @package twig
* @author Fabien Potencier <fabien@symfony.com>
*/
class Twig_Lexer implements Twig_LexerInterface
{
    protected $tokens;    // tokens collected so far for the resulting stream
    protected $code;      // source being lexed (newlines normalized to "\n")
    protected $cursor;    // current byte offset into $code
    protected $lineno;    // current line number (1-based)
    protected $end;       // strlen($code)
    protected $state;     // one of the STATE_* constants
    protected $states;    // stack of previously entered states
    protected $brackets;  // stack of open brackets: array(expected closer, lineno)
    protected $env;
    protected $filename;  // identifier reported in syntax errors
    protected $options;   // delimiter/trim options (see __construct defaults)
    protected $regexes;   // lexing regexes precompiled in __construct
    protected $position;  // index into $positions of the current token start
    protected $positions; // preg_match_all() result: all token starts in $code
    // Lexer states.
    const STATE_DATA = 0;
    const STATE_BLOCK = 1;
    const STATE_VAR = 2;
    const STATE_STRING = 3;
    const STATE_INTERPOLATION = 4;
    // Token-level regexes (all anchored at the cursor with the /A modifier).
    const REGEX_NAME = '/[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/A'; // identifier
    const REGEX_NUMBER = '/[0-9]+(?:\.[0-9]+)?/A'; // integer or decimal literal
    const REGEX_STRING = '/"([^#"\\\\]*(?:\\\\.[^#"\\\\]*)*)"|\'([^\'\\\\]*(?:\\\\.[^\'\\\\]*)*)\'/As'; // quoted string
    const REGEX_DQ_STRING_DELIM = '/"/A';
    const REGEX_DQ_STRING_PART = '/[^#"\\\\]*(?:(?:\\\\.|#(?!\{))[^#"\\\\]*)*/As'; // string part up to an interpolation
    const PUNCTUATION = '()[]{}?:.,|'; // characters emitted as punctuation tokens
    /**
     * Builds the lexer, merging user options over the default delimiters and
     * precompiling all delimiter-dependent regexes.
     *
     * @param Twig_Environment $env
     * @param array            $options tag/interpolation delimiters and the
     *                                  whitespace-trim marker
     */
    public function __construct(Twig_Environment $env, array $options = array())
    {
        $this->env = $env;
        $this->options = array_merge(array(
            'tag_comment' => array('{#', '#}'),
            'tag_block' => array('{%', '%}'),
            'tag_variable' => array('{{', '}}'),
            'whitespace_trim' => '-',
            'interpolation' => array('#{', '}'),
        ), $options);
        // Each regex is built from the (possibly customized) delimiters via
        // preg_quote(), with the whitespace-trim variant tried first.
        $this->regexes = array(
            'lex_var' => '/\s*'.preg_quote($this->options['whitespace_trim'].$this->options['tag_variable'][1], '/').'\s*|\s*'.preg_quote($this->options['tag_variable'][1], '/').'/A',
            'lex_block' => '/\s*(?:'.preg_quote($this->options['whitespace_trim'].$this->options['tag_block'][1], '/').'\s*|\s*'.preg_quote($this->options['tag_block'][1], '/').')\n?/A',
            'lex_raw_data' => '/('.preg_quote($this->options['tag_block'][0].$this->options['whitespace_trim'], '/').'|'.preg_quote($this->options['tag_block'][0], '/').')\s*endraw\s*(?:'.preg_quote($this->options['whitespace_trim'].$this->options['tag_block'][1], '/').'\s*|\s*'.preg_quote($this->options['tag_block'][1], '/').')/s',
            'operator' => $this->getOperatorRegex(),
            'lex_comment' => '/(?:'.preg_quote($this->options['whitespace_trim'], '/').preg_quote($this->options['tag_comment'][1], '/').'\s*|'.preg_quote($this->options['tag_comment'][1], '/').')\n?/s',
            'lex_block_raw' => '/\s*raw\s*(?:'.preg_quote($this->options['whitespace_trim'].$this->options['tag_block'][1], '/').'\s*|\s*'.preg_quote($this->options['tag_block'][1], '/').')/As',
            'lex_block_line' => '/\s*line\s+(\d+)\s*'.preg_quote($this->options['tag_block'][1], '/').'/As',
            'lex_tokens_start' => '/('.preg_quote($this->options['tag_variable'][0], '/').'|'.preg_quote($this->options['tag_block'][0], '/').'|'.preg_quote($this->options['tag_comment'][0], '/').')('.preg_quote($this->options['whitespace_trim'], '/').')?/s',
            'interpolation_start' => '/'.preg_quote($this->options['interpolation'][0], '/').'\s*/A',
            'interpolation_end' => '/\s*'.preg_quote($this->options['interpolation'][1], '/').'/A',
        );
    }
    /**
     * Tokenizes a source code.
     *
     * Normalizes line endings, resets all lexer state, pre-scans the template
     * for every tag-opening delimiter, then runs the state machine until the
     * cursor reaches the end of the source.
     *
     * @param string $code     The source code
     * @param string $filename A unique identifier for the source code
     *
     * @return Twig_TokenStream A token stream instance
     *
     * @throws Twig_Error_Syntax When a bracket is left unclosed at EOF
     */
    public function tokenize($code, $filename = null)
    {
        // When mbstring function overloading covers the string functions
        // (bit 2 of mbstring.func_overload), strlen()/substr() count
        // characters instead of bytes; force ASCII so the byte offsets from
        // PREG_OFFSET_CAPTURE below stay valid, and restore on exit.
        if (function_exists('mb_internal_encoding') && ((int) ini_get('mbstring.func_overload')) & 2) {
            $mbEncoding = mb_internal_encoding();
            mb_internal_encoding('ASCII');
        }
        // Normalize line endings so line counting only has to track "\n".
        $this->code = str_replace(array("\r\n", "\r"), "\n", $code);
        $this->filename = $filename;
        $this->cursor = 0;
        $this->lineno = 1;
        $this->end = strlen($this->code);
        $this->tokens = array();
        $this->state = self::STATE_DATA;
        $this->states = array();
        $this->brackets = array();
        $this->position = -1;
        // find all token starts in one go
        preg_match_all($this->regexes['lex_tokens_start'], $this->code, $matches, PREG_OFFSET_CAPTURE);
        $this->positions = $matches;
        while ($this->cursor < $this->end) {
            // dispatch to the lexing functions depending
            // on the current state
            switch ($this->state) {
                case self::STATE_DATA:
                    $this->lexData();
                    break;
                case self::STATE_BLOCK:
                    $this->lexBlock();
                    break;
                case self::STATE_VAR:
                    $this->lexVar();
                    break;
                case self::STATE_STRING:
                    $this->lexString();
                    break;
                case self::STATE_INTERPOLATION:
                    $this->lexInterpolation();
                    break;
            }
        }
        $this->pushToken(Twig_Token::EOF_TYPE);
        // Any bracket still on the stack at EOF was never closed.
        if (!empty($this->brackets)) {
            list($expect, $lineno) = array_pop($this->brackets);
            throw new Twig_Error_Syntax(sprintf('Unclosed "%s"', $expect), $lineno, $this->filename);
        }
        if (isset($mbEncoding)) {
            mb_internal_encoding($mbEncoding);
        }
        return new Twig_TokenStream($this->tokens, $this->filename);
    }
    /**
     * Lexes template text up to the next tag-opening delimiter, then
     * dispatches to the comment, block, or variable lexer.
     */
    protected function lexData()
    {
        // if no matches are left we return the rest of the template as simple text token
        if ($this->position == count($this->positions[0]) - 1) {
            $this->pushToken(Twig_Token::TEXT_TYPE, substr($this->code, $this->cursor));
            $this->cursor = $this->end;
            return;
        }
        // Find the first token after the current cursor
        $position = $this->positions[0][++$this->position];
        while ($position[1] < $this->cursor) {
            // Skip delimiter positions the cursor has already moved past
            // (e.g. delimiters consumed while lexing a raw section).
            if ($this->position == count($this->positions[0]) - 1) {
                return;
            }
            $position = $this->positions[0][++$this->position];
        }
        // push the template text first
        $text = $textContent = substr($this->code, $this->cursor, $position[1] - $this->cursor);
        // Capture group 2 of lex_tokens_start holds the optional
        // whitespace-trim modifier; when present, right-trim the text.
        if (isset($this->positions[2][$this->position][0])) {
            $text = rtrim($text);
        }
        $this->pushToken(Twig_Token::TEXT_TYPE, $text);
        // Advance using the untrimmed text so cursor/line tracking stays exact.
        $this->moveCursor($textContent.$position[0]);
        // Dispatch on which delimiter opened the tag.
        switch ($this->positions[1][$this->position][0]) {
            case $this->options['tag_comment'][0]:
                $this->lexComment();
                break;
            case $this->options['tag_block'][0]:
                // raw data?
                if (preg_match($this->regexes['lex_block_raw'], $this->code, $match, null, $this->cursor)) {
                    $this->moveCursor($match[0]);
                    $this->lexRawData();
                    // {% line \d+ %}
                } elseif (preg_match($this->regexes['lex_block_line'], $this->code, $match, null, $this->cursor)) {
                    $this->moveCursor($match[0]);
                    $this->lineno = (int) $match[1];
                } else {
                    $this->pushToken(Twig_Token::BLOCK_START_TYPE);
                    $this->pushState(self::STATE_BLOCK);
                }
                break;
            case $this->options['tag_variable'][0]:
                $this->pushToken(Twig_Token::VAR_START_TYPE);
                $this->pushState(self::STATE_VAR);
                break;
        }
    }
protected function lexBlock()
{
if (empty($this->brackets) && preg_match($this->regexes['lex_block'], $this->code, $match, null, $this->cursor)) {
$this->pushToken(Twig_Token::BLOCK_END_TYPE);
$this->moveCursor($match[0]);
$this->popState();
} else {
$this->lexExpression();
}
}
protected function lexVar()
{
if (empty($this->brackets) && preg_match($this->regexes['lex_var'], $this->code, $match, null, $this->cursor)) {
$this->pushToken(Twig_Token::VAR_END_TYPE);
$this->moveCursor($match[0]);
$this->popState();
} else {
$this->lexExpression();
}
}
    /**
     * Lexes one token inside a block or variable tag: whitespace, an
     * operator, a name, a number, punctuation, a single-quoted string, or
     * the opening quote of a double-quoted (interpolatable) string.
     */
    protected function lexExpression()
    {
        // whitespace
        if (preg_match('/\s+/A', $this->code, $match, null, $this->cursor)) {
            $this->moveCursor($match[0]);
            if ($this->cursor >= $this->end) {
                throw new Twig_Error_Syntax(sprintf('Unexpected end of file: Unclosed "%s"', $this->state === self::STATE_BLOCK ? 'block' : 'variable'), $this->lineno, $this->filename);
            }
        }
        // operators
        if (preg_match($this->regexes['operator'], $this->code, $match, null, $this->cursor)) {
            $this->pushToken(Twig_Token::OPERATOR_TYPE, $match[0]);
            $this->moveCursor($match[0]);
        }
        // names
        elseif (preg_match(self::REGEX_NAME, $this->code, $match, null, $this->cursor)) {
            $this->pushToken(Twig_Token::NAME_TYPE, $match[0]);
            $this->moveCursor($match[0]);
        }
        // numbers
        elseif (preg_match(self::REGEX_NUMBER, $this->code, $match, null, $this->cursor)) {
            $number = (float) $match[0]; // floats
            // Digit-only literals that fit in PHP's int range are stored as
            // int; anything larger stays a float.
            if (ctype_digit($match[0]) && $number <= PHP_INT_MAX) {
                $number = (int) $match[0]; // integers lower than the maximum
            }
            $this->pushToken(Twig_Token::NUMBER_TYPE, $number);
            $this->moveCursor($match[0]);
        }
        // punctuation
        elseif (false !== strpos(self::PUNCTUATION, $this->code[$this->cursor])) {
            // opening bracket
            if (false !== strpos('([{', $this->code[$this->cursor])) {
                // Remember the opener and its line for error reporting.
                $this->brackets[] = array($this->code[$this->cursor], $this->lineno);
            }
            // closing bracket
            elseif (false !== strpos(')]}', $this->code[$this->cursor])) {
                if (empty($this->brackets)) {
                    throw new Twig_Error_Syntax(sprintf('Unexpected "%s"', $this->code[$this->cursor]), $this->lineno, $this->filename);
                }
                list($expect, $lineno) = array_pop($this->brackets);
                // strtr maps the remembered opener to its expected closer.
                if ($this->code[$this->cursor] != strtr($expect, '([{', ')]}')) {
                    throw new Twig_Error_Syntax(sprintf('Unclosed "%s"', $expect), $lineno, $this->filename);
                }
            }
            $this->pushToken(Twig_Token::PUNCTUATION_TYPE, $this->code[$this->cursor]);
            ++$this->cursor;
        }
        // strings
        elseif (preg_match(self::REGEX_STRING, $this->code, $match, null, $this->cursor)) {
            // Strip the surrounding quotes and resolve C-style escapes.
            $this->pushToken(Twig_Token::STRING_TYPE, stripcslashes(substr($match[0], 1, -1)));
            $this->moveCursor($match[0]);
        }
        // opening double quoted string
        elseif (preg_match(self::REGEX_DQ_STRING_DELIM, $this->code, $match, null, $this->cursor)) {
            // Double-quoted strings may contain interpolations, so they are
            // lexed in a dedicated string state (see lexString()).
            $this->brackets[] = array('"', $this->lineno);
            $this->pushState(self::STATE_STRING);
            $this->moveCursor($match[0]);
        }
        // unlexable
        else {
            throw new Twig_Error_Syntax(sprintf('Unexpected character "%s"', $this->code[$this->cursor]), $this->lineno, $this->filename);
        }
    }
protected function lexRawData()
{
if (!preg_match($this->regexes['lex_raw_data'], $this->code, $match, PREG_OFFSET_CAPTURE, $this->cursor)) {
throw new Twig_Error_Syntax(sprintf('Unexpected end of file: Unclosed "block"'), $this->lineno, $this->filename);
}
$text = substr($this->code, $this->cursor, $match[0][1] - $this->cursor);
$this->moveCursor($text.$match[0][0]);
if (false !== strpos($match[1][0], $this->options['whitespace_trim'])) {
$text = rtrim($text);
}
$this->pushToken(Twig_Token::TEXT_TYPE, $text);
}
protected function lexComment()
{
if (!preg_match($this->regexes['lex_comment'], $this->code, $match, PREG_OFFSET_CAPTURE, $this->cursor)) {
throw new Twig_Error_Syntax('Unclosed comment', $this->lineno, $this->filename);
}
$this->moveCursor(substr($this->code, $this->cursor, $match[0][1] - $this->cursor).$match[0][0]);
}
    /**
     * Lexes the inside of a double-quoted string: an interpolation opening,
     * a run of plain string content, or the closing quote.
     */
    protected function lexString()
    {
        if (preg_match($this->regexes['interpolation_start'], $this->code, $match, null, $this->cursor)) {
            // An interpolation opens; track it as an open "bracket" so an
            // unterminated interpolation is reported at EOF.
            $this->brackets[] = array($this->options['interpolation'][0], $this->lineno);
            $this->pushToken(Twig_Token::INTERPOLATION_START_TYPE);
            $this->moveCursor($match[0]);
            $this->pushState(self::STATE_INTERPOLATION);
        } elseif (preg_match(self::REGEX_DQ_STRING_PART, $this->code, $match, null, $this->cursor) && strlen($match[0]) > 0) {
            // Plain (non-empty) string content: resolve C-style escapes.
            $this->pushToken(Twig_Token::STRING_TYPE, stripcslashes($match[0]));
            $this->moveCursor($match[0]);
        } elseif (preg_match(self::REGEX_DQ_STRING_DELIM, $this->code, $match, null, $this->cursor)) {
            // Closing quote: the innermost tracked bracket must be the '"'
            // pushed when the string was opened.
            list($expect, $lineno) = array_pop($this->brackets);
            if ($this->code[$this->cursor] != '"') {
                throw new Twig_Error_Syntax(sprintf('Unclosed "%s"', $expect), $lineno, $this->filename);
            }
            $this->popState();
            ++$this->cursor;
        }
    }
protected function lexInterpolation()
{
$bracket = end($this->brackets);
if ($this->options['interpolation'][0] === $bracket[0] && preg_match($this->regexes['interpolation_end'], $this->code, $match, null, $this->cursor)) {
array_pop($this->brackets);
$this->pushToken(Twig_Token::INTERPOLATION_END_TYPE);
$this->moveCursor($match[0]);
$this->popState();
} else {
$this->lexExpression();
}
}
protected function pushToken($type, $value = '')
{
// do not push empty text tokens
if (Twig_Token::TEXT_TYPE === $type && '' === $value) {
return;
}
$this->tokens[] = new Twig_Token($type, $value, $this->lineno);
}
protected function moveCursor($text)
{
$this->cursor += strlen($text);
$this->lineno += substr_count($text, "\n");
}
protected function getOperatorRegex()
{
$operators = array_merge(
array('='),
array_keys($this->env->getUnaryOperators()),
array_keys($this->env->getBinaryOperators())
);
$operators = array_combine($operators, array_map('strlen', $operators));
arsort($operators);
$regex = array();
foreach ($operators as $operator => $length) {
// an operator that ends with a character must be followed by
// a whitespace or a parenthesis
if (ctype_alpha($operator[$length - 1])) {
$regex[] = preg_quote($operator, '/').'(?=[\s()])';
} else {
$regex[] = preg_quote($operator, '/');
}
}
return '/'.implode('|', $regex).'/A';
}
protected function pushState($state)
{
$this->states[] = $this->state;
$this->state = $state;
}
protected function popState()
{
if (0 === count($this->states)) {
throw new Exception('Cannot pop state without a previous state');
}
$this->state = array_pop($this->states);
}
}
| {
"content_hash": "075f269a919dfb99c0fb8c8c30e7988e",
"timestamp": "",
"source": "github",
"line_count": 398,
"max_line_length": 341,
"avg_line_length": 40.675879396984925,
"alnum_prop": 0.4942244734078695,
"repo_name": "WCORP/just2",
"id": "e37535818a8756df7c0fc180ce59f792832c7ecb",
"size": "16422",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "vendor/twig/twig/lib/Twig/Lexer.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1017310"
},
{
"name": "PHP",
"bytes": "405326"
}
],
"symlink_target": ""
} |
namespace browser {

// Chooses the non-client frame view for |browser_view|: panels and popups
// get the popup frame, every other browser type gets the compact frame.
BrowserNonClientFrameView* CreateBrowserNonClientFrameView(
    BrowserFrame* frame, BrowserView* browser_view) {
  const Browser::Type window_type = browser_view->browser()->type();
  const bool is_popup_like = window_type == Browser::TYPE_PANEL ||
                             window_type == Browser::TYPE_POPUP;
  if (is_popup_like)
    return new PopupNonClientFrameView(frame);
  return new CompactBrowserFrameView(frame, browser_view);
}

}  // browser
| {
"content_hash": "c43c321bb32a27af523283ff14fae8eb",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 62,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.7156626506024096,
"repo_name": "paul99/clank",
"id": "3ee97467f81ca49325a6676af675a4595a6ab76e",
"size": "1051",
"binary": false,
"copies": "2",
"ref": "refs/heads/chrome-18.0.1025.469",
"path": "chrome/browser/chromeos/frame/browser_non_client_frame_view_factory_chromeos.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "56689"
},
{
"name": "C",
"bytes": "8707669"
},
{
"name": "C++",
"bytes": "89569069"
},
{
"name": "Go",
"bytes": "10440"
},
{
"name": "Java",
"bytes": "1201391"
},
{
"name": "JavaScript",
"bytes": "5587454"
},
{
"name": "Lua",
"bytes": "13641"
},
{
"name": "Objective-C",
"bytes": "4568468"
},
{
"name": "PHP",
"bytes": "11278"
},
{
"name": "Perl",
"bytes": "51521"
},
{
"name": "Python",
"bytes": "2615443"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Ruby",
"bytes": "107"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "588836"
}
],
"symlink_target": ""
} |
package net.hebrewcalendar.impl;

/**
 * A date on the Hebrew calendar, backed by the shared
 * {@code HebrewCalendar.INSTANCE} calendar implementation.
 */
public class HebrewDate
    extends HDateImpl
{
    // Package-private: construction goes through the calendar implementation
    // rather than client code.
    HebrewDate(int year, int month, int day)
    {
        super(HebrewCalendar.INSTANCE, year, month, day);
    }

    // NOTE(review): always returns 0 — this looks like an unimplemented
    // stub; confirm whether the absolute-day computation is still pending.
    @Override
    public long absDay()
    {
        return 0;
    }
}
| {
"content_hash": "211d967cef56ecbe622de6f8f6e1a991",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 57,
"avg_line_length": 15.055555555555555,
"alnum_prop": 0.6125461254612546,
"repo_name": "imush/hebrewcalendar",
"id": "043b5a116b9ee0e17a0c8f3a082045b90945a908",
"size": "271",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/net/hebrewcalendar/impl/HebrewDate.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "121348"
}
],
"symlink_target": ""
} |
using System.Collections.Generic;
using Umbraco.Core.Services;
namespace Umbraco.Web.HealthCheck.Checks.Config
{
[HealthCheck("9BED6EF4-A7F3-457A-8935-B64E9AA8BAB3", "Trace Mode",
Description = "Leaving trace mode enabled can make valuable information about your system available to hackers.",
Group = "Live Environment")]
public class TraceCheck : AbstractConfigCheck
{
public TraceCheck(ILocalizedTextService textService)
: base(textService)
{ }
public override string FilePath => "~/Web.config";
public override string XPath => "/configuration/system.web/trace/@enabled";
public override ValueComparisonType ValueComparisonType => ValueComparisonType.ShouldEqual;
public override IEnumerable<AcceptableConfiguration> Values => new List<AcceptableConfiguration>
{
new AcceptableConfiguration { IsRecommended = true, Value = bool.FalseString.ToLower() }
};
public override string CheckSuccessMessage => TextService.Localize("healthcheck", "traceModeCheckSuccessMessage");
public override string CheckErrorMessage => TextService.Localize("healthcheck", "traceModeCheckErrorMessage");
public override string RectifySuccessMessage => TextService.Localize("healthcheck", "traceModeCheckRectifySuccessMessage");
}
}
| {
"content_hash": "447c2b40f376d49da1692204be4d514f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 131,
"avg_line_length": 41.39393939393939,
"alnum_prop": 0.7254758418740849,
"repo_name": "leekelleher/Umbraco-CMS",
"id": "47c88bf2509e699688fe0ecf923442588924dacb",
"size": "1368",
"binary": false,
"copies": "3",
"ref": "refs/heads/v8/contrib",
"path": "src/Umbraco.Web/HealthCheck/Checks/Config/TraceCheck.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "484235"
},
{
"name": "Batchfile",
"bytes": "16156"
},
{
"name": "C#",
"bytes": "16505882"
},
{
"name": "CSS",
"bytes": "676666"
},
{
"name": "HTML",
"bytes": "776273"
},
{
"name": "JavaScript",
"bytes": "4045587"
},
{
"name": "PowerShell",
"bytes": "18034"
},
{
"name": "Python",
"bytes": "876"
},
{
"name": "Ruby",
"bytes": "765"
},
{
"name": "XSLT",
"bytes": "50045"
}
],
"symlink_target": ""
} |
using Builder.Utility;
using NugetWorker;
using System;
using System.IO;
using Builder.Engine;
namespace Builder
{
class Builder
{
static void Main(string[] args)
{
Console.WriteLine("Builder Task Begins!");
//create log file name for this session
var logFileName = $"{DateTime.Now.ToString("yyyy_MM_dd")}_{Guid.NewGuid().ToString()}.log";
Console.WriteLine($"going to create logger!!");
try
{
string _logdirectory = BuilderHelper.Instance.builderSettings.BuildLogDirectory;
BuilderHelper.Instance._logFileName = Path.Combine(_logdirectory, logFileName);
BuilderHelper.Instance.logger = new Utility.Logger(
BuilderHelper.Instance._logFileName);
//set the same for nuget engine dll
NugetHelper.Instance.logger = new NugetWorker.Logger(BuilderHelper.Instance._logFileName);
Console.WriteLine($"detailed logs for this build will be at : {Path.Combine(_logdirectory, logFileName)}!!");
BuilderEngine builderEngine = new BuilderEngine();
builderEngine.BuildPackage().Wait();
}
catch (Exception ex)
{
string detailedException = string.Empty;
try
{
detailedException= BuilderHelper.Instance.DeepException(ex);
Console.WriteLine($"Exception During Build : {Environment.NewLine} {ex.Message} | {ex.StackTrace} | {Environment.NewLine} {detailedException}");
}
catch(Exception childEx)
{
//do nothing , just log orignal exception
Console.WriteLine($"{Environment.NewLine} Exception During Build :{ex.Message} |{Environment.NewLine} {ex.StackTrace} {Environment.NewLine} ");
}
//now throw back exception so that build gets failed via builder
throw;
}
Console.WriteLine("Builder Task Ends!");
}
}
}
| {
"content_hash": "d8275867b62433603ac0bd174c89d5c6",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 164,
"avg_line_length": 39.69090909090909,
"alnum_prop": 0.5639028859367843,
"repo_name": "life1347/fission",
"id": "881640bd9f0ff82fb5e02a4d9f98490b40cf04bf",
"size": "2185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "environments/dotnet20/builder/Builder.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "74312"
},
{
"name": "Dockerfile",
"bytes": "11423"
},
{
"name": "Go",
"bytes": "1454635"
},
{
"name": "HCL",
"bytes": "1275"
},
{
"name": "Java",
"bytes": "4527"
},
{
"name": "JavaScript",
"bytes": "7718"
},
{
"name": "Makefile",
"bytes": "1556"
},
{
"name": "PHP",
"bytes": "2920"
},
{
"name": "Perl",
"bytes": "852"
},
{
"name": "Python",
"bytes": "4722"
},
{
"name": "Roff",
"bytes": "949"
},
{
"name": "Ruby",
"bytes": "5042"
},
{
"name": "Shell",
"bytes": "193063"
},
{
"name": "Smarty",
"bytes": "3049"
}
],
"symlink_target": ""
} |
- Updated to be compatible with latest PHPUnit.
- Updated to the latest PSR-3 (PSR Log) version.
- Added link to `apix/log-tracker`.
#### Version `1.2.1` (15-Oct-2015)
- Added `\LogicException` when the stream has been `__destruct()` too early.
#### Version `1.2.0` (18-Sep-2015)
NOTE: The major version number update is due to some internal changes. The actual client methods have not been changed, i.e. has the same signatures as the `1.1.*` branch.
- Changes to the handling and processing of each individual log entry (resulting in memory and CPU optimisation).
- Refactored and documented `LoggerInterface` (better API for contribution).
- Added `LogFormatter` class.
- Changed the ordering of the log levels to match [RFC5424](http://tools.ietf.org/html/rfc5424#section-6.2.1) (thanks @jspalink).
- Added a `Stream` logger.
- Added functional tests to support the README's examples.
- Added link to `jspalink/apix-log-pushover`.
- Updated the README.md
#### Version `1.1.4` (10-Sep-2015)
- Added link to `PHPMailer/apix-log-phpmailer`.
- Bug fixes.
#### Version `1.1.3` (10-Sep-2015)
- Added `setDeferred` so processing of logs happen at `__destruct` time on a bucket and/or logger level (thanks @Synchro).
- Updated the README.md accordingly.
#### Version `1.1.2` (28-Aug-2015)
- Updated the README.md
- Added HHVM support.
- Updated PHPUnit to 4.8 version.
- Added PHP7 support.
- Made Travis faster (using Docker containers and skipping allowable failures).
#### Version `1.1.1` (11-Jun-2015)
- Updated the README.md
- Fixed setCascading (just uncommented).
#### Version `1.1.0` (11-Jun-2015)
- Fixed a PHP 5.3 specific syntax error (unit-test)
- Renamed `Apix\Log\Logger\Null` to `Apix\Log\Logger\Nil`. 'Null' as a classname is now reserved to PHP7 usage, see [PHP RFC: Reserve More Types in PHP 7](https://wiki.php.net/rfc/reserve_more_types_in_php_7)
- Some semantic modifications, e.g. now using "Log Buckets" to hold loggers.
- Added bucket self-prioritization as opposed to the FIFO mode used until now.
- Fixed the cascading or not of log entries to subsequent loggers.
- Added some additional tests -- 100% code coverage!
#### Version `1.0.2` (10-Jun-2015)
- Added: the logged message can now be the context directly, i.e. not a string.
- Added `\InvalidArgumentException` with an explicit message to the main constructor.
- Added handling of Exception as context e.g. `$logger->critical( new \Exception('Boo!') )`.
#### Version `1.0.1` (9-Jun-2015)
- Added Scrutinizer checks.
- Added `.gitattributes` file.
- Added a unit tests `bootstrap.php` file.
- Added a default timezone to the unit tests bootstrapper.
- Fixed the context array handler (convert data to JSON).
- Added additional tests and minor changes.
- Updated the examples in `README.md`.
- Added a `CHANGELOG.md` file.
#### Version `1.0.0` (30-Sept-2014)
- Initial release.
<pre>
_|_| _|_| _| _| _|
_| _| _| _| _| _|
_| _| _| _| _| _|_|
_|_|_|_| _|_|_| _| _|_| _|_|
_| _| _| _| _| _|
_| _| _| _| _| _|
</pre>
| {
"content_hash": "721af96049ae0b6425814e96f22f0dd9",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 208,
"avg_line_length": 43.45070422535211,
"alnum_prop": 0.6755267423014587,
"repo_name": "frqnck/apix-log",
"id": "293899691a943e89bdf731539ea4cd32bf314e1a",
"size": "3142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CHANGELOG.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PHP",
"bytes": "47197"
}
],
"symlink_target": ""
} |
USE GSFSchema;
DELIMITER $$
-- Audit trigger: records one AuditLog row per changed column on UserAccount.
-- Comparisons use the NULL-safe operator <=> because "OLD.x != NEW.x"
-- evaluates to NULL (treated as false) when either side is NULL, which
-- silently skipped auditing transitions to or from NULL in nullable columns.
CREATE TRIGGER UserAccount_AuditUpdate AFTER UPDATE ON UserAccount FOR EACH ROW
BEGIN
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Password <=> NEW.Password) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Password', OriginalValue = OLD.Password, NewValue = NEW.Password, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.FirstName <=> NEW.FirstName) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FirstName', OriginalValue = OLD.FirstName, NewValue = NEW.FirstName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LastName <=> NEW.LastName) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LastName', OriginalValue = OLD.LastName, NewValue = NEW.LastName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DefaultNodeID <=> NEW.DefaultNodeID) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DefaultNodeID', OriginalValue = OLD.DefaultNodeID, NewValue = NEW.DefaultNodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Phone <=> NEW.Phone) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Phone', OriginalValue = OLD.Phone, NewValue = NEW.Phone, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Email <=> NEW.Email) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Email', OriginalValue = OLD.Email, NewValue = NEW.Email, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LockedOut <=> NEW.LockedOut) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LockedOut', OriginalValue = OLD.LockedOut, NewValue = NEW.LockedOut, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UseADAuthentication <=> NEW.UseADAuthentication) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UseADAuthentication', OriginalValue = OLD.UseADAuthentication, NewValue = NEW.UseADAuthentication, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ChangePasswordOn <=> NEW.ChangePasswordOn) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ChangePasswordOn', OriginalValue = OLD.ChangePasswordOn, NewValue = NEW.ChangePasswordOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trigger: on UserAccount row deletion, writes one AuditLog row per
-- column with Deleted = '1', preserving the old value. The acting user comes
-- from the @context session variable — presumably set by the application
-- before issuing the DELETE; verify against the data-access layer.
CREATE TRIGGER UserAccount_AuditDelete AFTER DELETE ON UserAccount
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Password', OriginalValue = OLD.Password, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FirstName', OriginalValue = OLD.FirstName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LastName', OriginalValue = OLD.LastName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DefaultNodeID', OriginalValue = OLD.DefaultNodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Phone', OriginalValue = OLD.Phone, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Email', OriginalValue = OLD.Email, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LockedOut', OriginalValue = OLD.LockedOut, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UseADAuthentication', OriginalValue = OLD.UseADAuthentication, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ChangePasswordOn', OriginalValue = OLD.ChangePasswordOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'UserAccount', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audit trigger: records one AuditLog row per changed column on
-- SecurityGroup. NULL-safe <=> comparison is used because "OLD.x != NEW.x"
-- is NULL (treated as false) when either side is NULL, which silently
-- skipped auditing transitions to or from NULL (e.g. Description).
CREATE TRIGGER SecurityGroup_AuditUpdate AFTER UPDATE ON SecurityGroup
FOR EACH ROW BEGIN
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Description <=> NEW.Description) THEN
        INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trigger: on SecurityGroup row deletion, writes one AuditLog row per
-- column with Deleted = '1', preserving the old value. The acting user comes
-- from the @context session variable — presumably set by the application
-- before issuing the DELETE; verify against the data-access layer.
CREATE TRIGGER SecurityGroup_AuditDelete AFTER DELETE ON SecurityGroup
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'SecurityGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audit trigger: records one AuditLog row per changed column on
-- ApplicationRole. NULL-safe <=> comparison is used because "OLD.x != NEW.x"
-- is NULL (treated as false) when either side is NULL, which silently
-- skipped auditing transitions to or from NULL (e.g. Description).
CREATE TRIGGER ApplicationRole_AuditUpdate AFTER UPDATE ON ApplicationRole
FOR EACH ROW BEGIN
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Description <=> NEW.Description) THEN
        INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trigger: after a DELETE on ApplicationRole, writes one AuditLog row per
-- audited column with Deleted = '1' and NewValue = NULL, preserving the final
-- pre-delete value of every column. UpdatedBy is read from the session variable
-- @context -- presumably set by the application before issuing the DELETE; if
-- unset it will be NULL (confirm against the calling code).
CREATE TRIGGER ApplicationRole_AuditDelete AFTER DELETE ON ApplicationRole
FOR EACH ROW BEGIN
INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'ApplicationRole', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audit trigger: after each UPDATE on CalculatedMeasurement, writes one AuditLog
-- row per column whose value actually changed; the acting user is NEW.UpdatedBy.
-- Comparisons use MySQL's NULL-safe operator <=> (negated): a plain OLD.x != NEW.x
-- evaluates to NULL (not TRUE) when exactly one side is NULL, so a change from
-- NULL to a value (or back) in nullable columns such as ConfigSection,
-- InputMeasurements or OutputMeasurements would never be audited.
CREATE TRIGGER CalculatedMeasurement_AuditUpdate AFTER UPDATE ON CalculatedMeasurement
FOR EACH ROW BEGIN
IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.Name <=> NEW.Name) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.AssemblyName <=> NEW.AssemblyName) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NEW.AssemblyName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.TypeName <=> NEW.TypeName) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NEW.TypeName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.ConnectionString <=> NEW.ConnectionString) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NEW.ConnectionString, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.ConfigSection <=> NEW.ConfigSection) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConfigSection', OriginalValue = OLD.ConfigSection, NewValue = NEW.ConfigSection, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.InputMeasurements <=> NEW.InputMeasurements) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'InputMeasurements', OriginalValue = OLD.InputMeasurements, NewValue = NEW.InputMeasurements, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.OutputMeasurements <=> NEW.OutputMeasurements) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'OutputMeasurements', OriginalValue = OLD.OutputMeasurements, NewValue = NEW.OutputMeasurements, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.MinimumMeasurementsToUse <=> NEW.MinimumMeasurementsToUse) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MinimumMeasurementsToUse', OriginalValue = OLD.MinimumMeasurementsToUse, NewValue = NEW.MinimumMeasurementsToUse, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.FramesPerSecond <=> NEW.FramesPerSecond) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FramesPerSecond', OriginalValue = OLD.FramesPerSecond, NewValue = NEW.FramesPerSecond, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.LagTime <=> NEW.LagTime) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LagTime', OriginalValue = OLD.LagTime, NewValue = NEW.LagTime, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.LeadTime <=> NEW.LeadTime) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LeadTime', OriginalValue = OLD.LeadTime, NewValue = NEW.LeadTime, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UseLocalClockAsRealTime <=> NEW.UseLocalClockAsRealTime) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UseLocalClockAsRealTime', OriginalValue = OLD.UseLocalClockAsRealTime, NewValue = NEW.UseLocalClockAsRealTime, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.AllowSortsByArrival <=> NEW.AllowSortsByArrival) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowSortsByArrival', OriginalValue = OLD.AllowSortsByArrival, NewValue = NEW.AllowSortsByArrival, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.IgnoreBadTimestamps <=> NEW.IgnoreBadTimestamps) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IgnoreBadTimestamps', OriginalValue = OLD.IgnoreBadTimestamps, NewValue = NEW.IgnoreBadTimestamps, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.TimeResolution <=> NEW.TimeResolution) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TimeResolution', OriginalValue = OLD.TimeResolution, NewValue = NEW.TimeResolution, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.AllowPreemptivePublishing <=> NEW.AllowPreemptivePublishing) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowPreemptivePublishing', OriginalValue = OLD.AllowPreemptivePublishing, NewValue = NEW.AllowPreemptivePublishing, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.PerformTimeReasonabilityCheck <=> NEW.PerformTimeReasonabilityCheck) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PerformTimeReasonabilityCheck', OriginalValue = OLD.PerformTimeReasonabilityCheck, NewValue = NEW.PerformTimeReasonabilityCheck, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.DownsamplingMethod <=> NEW.DownsamplingMethod) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DownsamplingMethod', OriginalValue = OLD.DownsamplingMethod, NewValue = NEW.DownsamplingMethod, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
END$$
-- Audit trigger: after a DELETE on CalculatedMeasurement, writes one AuditLog row
-- per audited column with Deleted = '1' and NewValue = NULL, preserving the final
-- pre-delete value of every column. UpdatedBy is read from the session variable
-- @context -- presumably set by the application before issuing the DELETE; if
-- unset it will be NULL (confirm against the calling code).
CREATE TRIGGER CalculatedMeasurement_AuditDelete AFTER DELETE ON CalculatedMeasurement
FOR EACH ROW BEGIN
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConfigSection', OriginalValue = OLD.ConfigSection, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'InputMeasurements', OriginalValue = OLD.InputMeasurements, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'OutputMeasurements', OriginalValue = OLD.OutputMeasurements, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MinimumMeasurementsToUse', OriginalValue = OLD.MinimumMeasurementsToUse, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FramesPerSecond', OriginalValue = OLD.FramesPerSecond, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LagTime', OriginalValue = OLD.LagTime, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LeadTime', OriginalValue = OLD.LeadTime, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UseLocalClockAsRealTime', OriginalValue = OLD.UseLocalClockAsRealTime, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowSortsByArrival', OriginalValue = OLD.AllowSortsByArrival, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IgnoreBadTimestamps', OriginalValue = OLD.IgnoreBadTimestamps, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TimeResolution', OriginalValue = OLD.TimeResolution, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowPreemptivePublishing', OriginalValue = OLD.AllowPreemptivePublishing, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PerformTimeReasonabilityCheck', OriginalValue = OLD.PerformTimeReasonabilityCheck, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DownsamplingMethod', OriginalValue = OLD.DownsamplingMethod, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CalculatedMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audit trigger: after each UPDATE on Company, writes one AuditLog row per column
-- whose value actually changed; the acting user is taken from NEW.UpdatedBy.
-- Comparisons use MySQL's NULL-safe operator <=> (negated): a plain OLD.x != NEW.x
-- evaluates to NULL (not TRUE) when exactly one side is NULL, so NULL<->value
-- transitions (e.g. clearing or setting Url) would be silently skipped.
CREATE TRIGGER Company_AuditUpdate AFTER UPDATE ON Company
FOR EACH ROW BEGIN
IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.MapAcronym <=> NEW.MapAcronym) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MapAcronym', OriginalValue = OLD.MapAcronym, NewValue = NEW.MapAcronym, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.Name <=> NEW.Name) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.Url <=> NEW.Url) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Url', OriginalValue = OLD.Url, NewValue = NEW.Url, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
END$$
-- Audit trigger: after a DELETE on Company, writes one AuditLog row per audited
-- column with Deleted = '1' and NewValue = NULL, preserving the final pre-delete
-- value of every column. UpdatedBy is read from the session variable @context --
-- presumably set by the application before issuing the DELETE; if unset it will
-- be NULL (confirm against the calling code).
CREATE TRIGGER Company_AuditDelete AFTER DELETE ON Company
FOR EACH ROW BEGIN
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MapAcronym', OriginalValue = OLD.MapAcronym, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Url', OriginalValue = OLD.Url, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'Company', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audit trigger: after each UPDATE on CustomActionAdapter, writes one AuditLog row
-- per column whose value actually changed; the acting user is NEW.UpdatedBy.
-- Comparisons use MySQL's NULL-safe operator <=> (negated): a plain OLD.x != NEW.x
-- evaluates to NULL (not TRUE) when exactly one side is NULL, so NULL<->value
-- transitions (e.g. clearing ConnectionString) would be silently skipped.
CREATE TRIGGER CustomActionAdapter_AuditUpdate AFTER UPDATE ON CustomActionAdapter
FOR EACH ROW BEGIN
IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.AdapterName <=> NEW.AdapterName) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterName', OriginalValue = OLD.AdapterName, NewValue = NEW.AdapterName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.AssemblyName <=> NEW.AssemblyName) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NEW.AssemblyName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.TypeName <=> NEW.TypeName) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NEW.TypeName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.ConnectionString <=> NEW.ConnectionString) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NEW.ConnectionString, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
END$$
-- Audit trigger: after a DELETE on CustomActionAdapter, writes one AuditLog row
-- per audited column with Deleted = '1' and NewValue = NULL, preserving the final
-- pre-delete value of every column. UpdatedBy is read from the session variable
-- @context -- presumably set by the application before issuing the DELETE; if
-- unset it will be NULL (confirm against the calling code).
CREATE TRIGGER CustomActionAdapter_AuditDelete AFTER DELETE ON CustomActionAdapter
FOR EACH ROW BEGIN
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterName', OriginalValue = OLD.AdapterName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomActionAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audit trigger: after each UPDATE on CustomInputAdapter, writes one AuditLog row
-- per column whose value actually changed; the acting user is NEW.UpdatedBy.
-- Comparisons use MySQL's NULL-safe operator <=> (negated): a plain OLD.x != NEW.x
-- evaluates to NULL (not TRUE) when exactly one side is NULL, so NULL<->value
-- transitions (e.g. clearing ConnectionString) would be silently skipped.
CREATE TRIGGER CustomInputAdapter_AuditUpdate AFTER UPDATE ON CustomInputAdapter
FOR EACH ROW BEGIN
IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.AdapterName <=> NEW.AdapterName) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterName', OriginalValue = OLD.AdapterName, NewValue = NEW.AdapterName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.AssemblyName <=> NEW.AssemblyName) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NEW.AssemblyName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.TypeName <=> NEW.TypeName) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NEW.TypeName, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.ConnectionString <=> NEW.ConnectionString) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NEW.ConnectionString, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
END$$
-- Audit trigger: after a DELETE on CustomInputAdapter, writes one AuditLog row
-- per audited column with Deleted = '1' and NewValue = NULL, preserving the final
-- pre-delete value of every column. UpdatedBy is read from the session variable
-- @context -- presumably set by the application before issuing the DELETE; if
-- unset it will be NULL (confirm against the calling code).
CREATE TRIGGER CustomInputAdapter_AuditDelete AFTER DELETE ON CustomInputAdapter
FOR EACH ROW BEGIN
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterName', OriginalValue = OLD.AdapterName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
INSERT INTO AuditLog SET TableName = 'CustomInputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audits column-level changes to CustomOutputAdapter, writing one AuditLog row
-- per changed column. Comparisons use the null-safe operator (<=>) because a
-- plain OLD.x != NEW.x evaluates to NULL (not TRUE) whenever either side is
-- NULL, which silently skipped auditing changes to or from NULL values.
CREATE TRIGGER CustomOutputAdapter_AuditUpdate AFTER UPDATE ON CustomOutputAdapter
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AdapterName <=> NEW.AdapterName) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterName', OriginalValue = OLD.AdapterName, NewValue = NEW.AdapterName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AssemblyName <=> NEW.AssemblyName) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NEW.AssemblyName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.TypeName <=> NEW.TypeName) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NEW.TypeName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ConnectionString <=> NEW.ConnectionString) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NEW.ConnectionString, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'CustomOutputAdapter', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Writes one AuditLog row per CustomOutputAdapter column when a row is deleted,
-- preserving the final value of each column; @context (session variable set by
-- the application) records who performed the delete.
CREATE TRIGGER CustomOutputAdapter_AuditDelete AFTER DELETE ON CustomOutputAdapter
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'NodeID', OLD.NodeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'AdapterName', OLD.AdapterName, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'AssemblyName', OLD.AssemblyName, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'TypeName', OLD.TypeName, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'ConnectionString', OLD.ConnectionString, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'Enabled', OLD.Enabled, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('CustomOutputAdapter', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
END$$
-- Audits column-level changes to Device, writing one AuditLog row per changed
-- column. Comparisons use the null-safe operator (<=>) because a plain
-- OLD.x != NEW.x evaluates to NULL (not TRUE) whenever either side is NULL,
-- which silently skipped auditing transitions to or from NULL (relevant for
-- nullable columns such as ParentID, CompanyID, VendorDeviceID,
-- ConnectionString, TimeZone, ContactList, MeasuredLines).
CREATE TRIGGER Device_AuditUpdate AFTER UPDATE ON Device
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ParentID <=> NEW.ParentID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ParentID', OriginalValue = OLD.ParentID, NewValue = NEW.ParentID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UniqueID <=> NEW.UniqueID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UniqueID', OriginalValue = OLD.UniqueID, NewValue = NEW.UniqueID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.OriginalSource <=> NEW.OriginalSource) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'OriginalSource', OriginalValue = OLD.OriginalSource, NewValue = NEW.OriginalSource, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.IsConcentrator <=> NEW.IsConcentrator) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IsConcentrator', OriginalValue = OLD.IsConcentrator, NewValue = NEW.IsConcentrator, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CompanyID <=> NEW.CompanyID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CompanyID', OriginalValue = OLD.CompanyID, NewValue = NEW.CompanyID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.HistorianID <=> NEW.HistorianID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'HistorianID', OriginalValue = OLD.HistorianID, NewValue = NEW.HistorianID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AccessID <=> NEW.AccessID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AccessID', OriginalValue = OLD.AccessID, NewValue = NEW.AccessID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.VendorDeviceID <=> NEW.VendorDeviceID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'VendorDeviceID', OriginalValue = OLD.VendorDeviceID, NewValue = NEW.VendorDeviceID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ProtocolID <=> NEW.ProtocolID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ProtocolID', OriginalValue = OLD.ProtocolID, NewValue = NEW.ProtocolID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Longitude <=> NEW.Longitude) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Longitude', OriginalValue = OLD.Longitude, NewValue = NEW.Longitude, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Latitude <=> NEW.Latitude) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Latitude', OriginalValue = OLD.Latitude, NewValue = NEW.Latitude, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.InterconnectionID <=> NEW.InterconnectionID) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'InterconnectionID', OriginalValue = OLD.InterconnectionID, NewValue = NEW.InterconnectionID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ConnectionString <=> NEW.ConnectionString) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NEW.ConnectionString, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ConnectOnDemand <=> NEW.ConnectOnDemand) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectOnDemand', OriginalValue = OLD.ConnectOnDemand, NewValue = NEW.ConnectOnDemand, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.TimeZone <=> NEW.TimeZone) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TimeZone', OriginalValue = OLD.TimeZone, NewValue = NEW.TimeZone, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.FramesPerSecond <=> NEW.FramesPerSecond) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FramesPerSecond', OriginalValue = OLD.FramesPerSecond, NewValue = NEW.FramesPerSecond, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.TimeAdjustmentTicks <=> NEW.TimeAdjustmentTicks) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TimeAdjustmentTicks', OriginalValue = OLD.TimeAdjustmentTicks, NewValue = NEW.TimeAdjustmentTicks, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DataLossInterval <=> NEW.DataLossInterval) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DataLossInterval', OriginalValue = OLD.DataLossInterval, NewValue = NEW.DataLossInterval, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AllowedParsingExceptions <=> NEW.AllowedParsingExceptions) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowedParsingExceptions', OriginalValue = OLD.AllowedParsingExceptions, NewValue = NEW.AllowedParsingExceptions, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ParsingExceptionWindow <=> NEW.ParsingExceptionWindow) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ParsingExceptionWindow', OriginalValue = OLD.ParsingExceptionWindow, NewValue = NEW.ParsingExceptionWindow, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DelayedConnectionInterval <=> NEW.DelayedConnectionInterval) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DelayedConnectionInterval', OriginalValue = OLD.DelayedConnectionInterval, NewValue = NEW.DelayedConnectionInterval, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AllowUseOfCachedConfiguration <=> NEW.AllowUseOfCachedConfiguration) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowUseOfCachedConfiguration', OriginalValue = OLD.AllowUseOfCachedConfiguration, NewValue = NEW.AllowUseOfCachedConfiguration, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AutoStartDataParsingSequence <=> NEW.AutoStartDataParsingSequence) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AutoStartDataParsingSequence', OriginalValue = OLD.AutoStartDataParsingSequence, NewValue = NEW.AutoStartDataParsingSequence, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.SkipDisableRealTimeData <=> NEW.SkipDisableRealTimeData) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SkipDisableRealTimeData', OriginalValue = OLD.SkipDisableRealTimeData, NewValue = NEW.SkipDisableRealTimeData, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.MeasurementReportingInterval <=> NEW.MeasurementReportingInterval) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MeasurementReportingInterval', OriginalValue = OLD.MeasurementReportingInterval, NewValue = NEW.MeasurementReportingInterval, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ContactList <=> NEW.ContactList) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ContactList', OriginalValue = OLD.ContactList, NewValue = NEW.ContactList, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.MeasuredLines <=> NEW.MeasuredLines) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MeasuredLines', OriginalValue = OLD.MeasuredLines, NewValue = NEW.MeasuredLines, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Writes one AuditLog row per Device column when a row is deleted, preserving
-- the final value of each column; @context identifies the deleting user.
-- Fix: the OriginalSource row was missing here even though the companion
-- Device_AuditUpdate trigger audits that column, so its last value was lost on
-- delete. It is now logged alongside the other columns.
CREATE TRIGGER Device_AuditDelete AFTER DELETE ON Device
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ParentID', OriginalValue = OLD.ParentID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UniqueID', OriginalValue = OLD.UniqueID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'OriginalSource', OriginalValue = OLD.OriginalSource, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IsConcentrator', OriginalValue = OLD.IsConcentrator, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CompanyID', OriginalValue = OLD.CompanyID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'HistorianID', OriginalValue = OLD.HistorianID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AccessID', OriginalValue = OLD.AccessID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'VendorDeviceID', OriginalValue = OLD.VendorDeviceID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ProtocolID', OriginalValue = OLD.ProtocolID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Longitude', OriginalValue = OLD.Longitude, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Latitude', OriginalValue = OLD.Latitude, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'InterconnectionID', OriginalValue = OLD.InterconnectionID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectOnDemand', OriginalValue = OLD.ConnectOnDemand, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TimeZone', OriginalValue = OLD.TimeZone, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FramesPerSecond', OriginalValue = OLD.FramesPerSecond, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TimeAdjustmentTicks', OriginalValue = OLD.TimeAdjustmentTicks, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DataLossInterval', OriginalValue = OLD.DataLossInterval, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowedParsingExceptions', OriginalValue = OLD.AllowedParsingExceptions, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ParsingExceptionWindow', OriginalValue = OLD.ParsingExceptionWindow, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DelayedConnectionInterval', OriginalValue = OLD.DelayedConnectionInterval, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowUseOfCachedConfiguration', OriginalValue = OLD.AllowUseOfCachedConfiguration, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AutoStartDataParsingSequence', OriginalValue = OLD.AutoStartDataParsingSequence, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SkipDisableRealTimeData', OriginalValue = OLD.SkipDisableRealTimeData, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MeasurementReportingInterval', OriginalValue = OLD.MeasurementReportingInterval, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ContactList', OriginalValue = OLD.ContactList, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MeasuredLines', OriginalValue = OLD.MeasuredLines, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Device', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Audits column-level changes to Historian, writing one AuditLog row per
-- changed column. Comparisons use the null-safe operator (<=>) because a plain
-- OLD.x != NEW.x evaluates to NULL (not TRUE) whenever either side is NULL,
-- which silently skipped auditing transitions to or from NULL (relevant for
-- nullable columns such as ConnectionString and Description).
CREATE TRIGGER Historian_AuditUpdate AFTER UPDATE ON Historian
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AssemblyName <=> NEW.AssemblyName) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssemblyName', OriginalValue = OLD.AssemblyName, NewValue = NEW.AssemblyName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.TypeName <=> NEW.TypeName) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TypeName', OriginalValue = OLD.TypeName, NewValue = NEW.TypeName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ConnectionString <=> NEW.ConnectionString) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NEW.ConnectionString, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.IsLocal <=> NEW.IsLocal) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IsLocal', OriginalValue = OLD.IsLocal, NewValue = NEW.IsLocal, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.MeasurementReportingInterval <=> NEW.MeasurementReportingInterval) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MeasurementReportingInterval', OriginalValue = OLD.MeasurementReportingInterval, NewValue = NEW.MeasurementReportingInterval, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Description <=> NEW.Description) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Historian', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Writes one AuditLog row per Historian column when a row is deleted,
-- preserving the final value of each column; @context (session variable set by
-- the application) records who performed the delete.
CREATE TRIGGER Historian_AuditDelete AFTER DELETE ON Historian
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'NodeID', OLD.NodeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'Acronym', OLD.Acronym, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'Name', OLD.Name, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'AssemblyName', OLD.AssemblyName, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'TypeName', OLD.TypeName, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'ConnectionString', OLD.ConnectionString, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'IsLocal', OLD.IsLocal, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'MeasurementReportingInterval', OLD.MeasurementReportingInterval, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'Description', OLD.Description, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'Enabled', OLD.Enabled, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Historian', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
CREATE TRIGGER Measurement_AuditUpdate AFTER UPDATE ON Measurement
FOR EACH ROW BEGIN
IF OLD.HistorianID != NEW.HistorianID THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'HistorianID', OriginalValue = OLD.HistorianID, NewValue = NEW.HistorianID, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.PointID != NEW.PointID THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'PointID', OriginalValue = OLD.PointID, NewValue = NEW.PointID, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.DeviceID != NEW.DeviceID THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'DeviceID', OriginalValue = OLD.DeviceID, NewValue = NEW.DeviceID, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.PointTag != NEW.PointTag THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'PointTag', OriginalValue = OLD.PointTag, NewValue = NEW.PointTag, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.AlternateTag != NEW.AlternateTag THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'AlternateTag', OriginalValue = OLD.AlternateTag, NewValue = NEW.AlternateTag, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.SignalTypeID != NEW.SignalTypeID THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'SignalTypeID', OriginalValue = OLD.SignalTypeID, NewValue = NEW.SignalTypeID, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.PhasorSourceIndex != NEW.PhasorSourceIndex THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'PhasorSourceIndex', OriginalValue = OLD.PhasorSourceIndex, NewValue = NEW.PhasorSourceIndex, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.SignalReference != NEW.SignalReference THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'SignalReference', OriginalValue = OLD.SignalReference, NewValue = NEW.SignalReference, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.Adder != NEW.Adder THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'Adder', OriginalValue = OLD.Adder, NewValue = NEW.Adder, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.Multiplier != NEW.Multiplier THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'Multiplier', OriginalValue = OLD.Multiplier, NewValue = NEW.Multiplier, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.Description != NEW.Description THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.Subscribed != NEW.Subscribed THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'Subscribed', OriginalValue = OLD.Subscribed, NewValue = NEW.Subscribed, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.Internal != NEW.Internal THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'Internal', OriginalValue = OLD.Internal, NewValue = NEW.Internal, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.Enabled != NEW.Enabled THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.CreatedOn != NEW.CreatedOn THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.CreatedBy != NEW.CreatedBy THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.UpdatedOn != NEW.UpdatedOn THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
END IF;
IF OLD.UpdatedBy != NEW.UpdatedBy THEN
INSERT INTO AuditLog SET TableName = 'Measurement', PrimaryKeyColumn = 'SignalID', PrimaryKeyValue = OLD.SignalID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
END IF;
END$$
-- Audit trail for Measurement deletions: one AuditLog row per column,
-- preserving the final value. Each row is flagged Deleted = '1' and
-- attributed to the session user held in the @context variable.
CREATE TRIGGER Measurement_AuditDelete AFTER DELETE ON Measurement
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'HistorianID', OLD.HistorianID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'PointID', OLD.PointID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'DeviceID', OLD.DeviceID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'PointTag', OLD.PointTag, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'AlternateTag', OLD.AlternateTag, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'SignalTypeID', OLD.SignalTypeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'PhasorSourceIndex', OLD.PhasorSourceIndex, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'SignalReference', OLD.SignalReference, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'Adder', OLD.Adder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'Multiplier', OLD.Multiplier, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'Description', OLD.Description, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'Subscribed', OLD.Subscribed, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'Internal', OLD.Internal, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'Enabled', OLD.Enabled, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Measurement', 'SignalID', OLD.SignalID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for Node updates: writes one AuditLog row per modified column,
-- attributed to NEW.UpdatedBy.
-- Fix: comparisons use the NULL-safe equality operator (<=>) instead of !=.
-- A plain "OLD.x != NEW.x" evaluates to NULL (treated as FALSE) whenever
-- either operand is NULL, so changes that set a nullable column to or from
-- NULL were silently left out of the audit log. NOT (OLD.x <=> NEW.x) is
-- TRUE exactly when the values differ, including NULL transitions, and is
-- identical to != when both operands are non-NULL.
CREATE TRIGGER Node_AuditUpdate AFTER UPDATE ON Node
FOR EACH ROW BEGIN
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CompanyID <=> NEW.CompanyID) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CompanyID', OriginalValue = OLD.CompanyID, NewValue = NEW.CompanyID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Longitude <=> NEW.Longitude) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Longitude', OriginalValue = OLD.Longitude, NewValue = NEW.Longitude, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Latitude <=> NEW.Latitude) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Latitude', OriginalValue = OLD.Latitude, NewValue = NEW.Latitude, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Description <=> NEW.Description) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ImagePath <=> NEW.ImagePath) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ImagePath', OriginalValue = OLD.ImagePath, NewValue = NEW.ImagePath, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Settings <=> NEW.Settings) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Settings', OriginalValue = OLD.Settings, NewValue = NEW.Settings, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.MenuType <=> NEW.MenuType) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MenuType', OriginalValue = OLD.MenuType, NewValue = NEW.MenuType, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.MenuData <=> NEW.MenuData) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MenuData', OriginalValue = OLD.MenuData, NewValue = NEW.MenuData, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Master <=> NEW.Master) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Master', OriginalValue = OLD.Master, NewValue = NEW.Master, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Node', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trail for Node deletions: one AuditLog row per column, preserving
-- the final value. Rows are flagged Deleted = '1' and attributed to the
-- session user held in the @context variable.
CREATE TRIGGER Node_AuditDelete AFTER DELETE ON Node
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'Name', OLD.Name, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'CompanyID', OLD.CompanyID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'Longitude', OLD.Longitude, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'Latitude', OLD.Latitude, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'Description', OLD.Description, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'ImagePath', OLD.ImagePath, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'Settings', OLD.Settings, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'MenuType', OLD.MenuType, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'MenuData', OLD.MenuData, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'Master', OLD.Master, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'Enabled', OLD.Enabled, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('Node', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for OtherDevice updates: writes one AuditLog row per modified
-- column, attributed to NEW.UpdatedBy.
-- Fix: comparisons use the NULL-safe equality operator (<=>) instead of !=.
-- "OLD.x != NEW.x" yields NULL (treated as FALSE) when either operand is
-- NULL, so changes to or from NULL in nullable columns (CompanyID,
-- VendorDeviceID, Longitude, ...) went unaudited. NOT (OLD.x <=> NEW.x)
-- detects every difference and matches != for non-NULL operands.
CREATE TRIGGER OtherDevice_AuditUpdate AFTER UPDATE ON OtherDevice
FOR EACH ROW BEGIN
    IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.IsConcentrator <=> NEW.IsConcentrator) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IsConcentrator', OriginalValue = OLD.IsConcentrator, NewValue = NEW.IsConcentrator, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CompanyID <=> NEW.CompanyID) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CompanyID', OriginalValue = OLD.CompanyID, NewValue = NEW.CompanyID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.VendorDeviceID <=> NEW.VendorDeviceID) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'VendorDeviceID', OriginalValue = OLD.VendorDeviceID, NewValue = NEW.VendorDeviceID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Longitude <=> NEW.Longitude) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Longitude', OriginalValue = OLD.Longitude, NewValue = NEW.Longitude, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Latitude <=> NEW.Latitude) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Latitude', OriginalValue = OLD.Latitude, NewValue = NEW.Latitude, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.InterconnectionID <=> NEW.InterconnectionID) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'InterconnectionID', OriginalValue = OLD.InterconnectionID, NewValue = NEW.InterconnectionID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Planned <=> NEW.Planned) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Planned', OriginalValue = OLD.Planned, NewValue = NEW.Planned, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Desired <=> NEW.Desired) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Desired', OriginalValue = OLD.Desired, NewValue = NEW.Desired, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.InProgress <=> NEW.InProgress) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'InProgress', OriginalValue = OLD.InProgress, NewValue = NEW.InProgress, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OtherDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trail for OtherDevice deletions: one AuditLog row per column,
-- preserving the final value. Rows are flagged Deleted = '1' and attributed
-- to the session user held in the @context variable.
CREATE TRIGGER OtherDevice_AuditDelete AFTER DELETE ON OtherDevice
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'Acronym', OLD.Acronym, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'Name', OLD.Name, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'IsConcentrator', OLD.IsConcentrator, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'CompanyID', OLD.CompanyID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'VendorDeviceID', OLD.VendorDeviceID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'Longitude', OLD.Longitude, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'Latitude', OLD.Latitude, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'InterconnectionID', OLD.InterconnectionID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'Planned', OLD.Planned, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'Desired', OLD.Desired, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'InProgress', OLD.InProgress, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OtherDevice', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for OutputStream updates: writes one AuditLog row per modified
-- column, attributed to NEW.UpdatedBy.
-- Fix: comparisons use the NULL-safe equality operator (<=>) instead of !=.
-- "OLD.x != NEW.x" evaluates to NULL (treated as FALSE) when either operand
-- is NULL, so setting a nullable column (ConnectionString, DataChannel,
-- CommandChannel, ...) to or from NULL was never audited.
-- NOT (OLD.x <=> NEW.x) is TRUE exactly when the values differ, NULLs
-- included, and behaves like != when both operands are non-NULL.
CREATE TRIGGER OutputStream_AuditUpdate AFTER UPDATE ON OutputStream
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Type <=> NEW.Type) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Type', OriginalValue = OLD.Type, NewValue = NEW.Type, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ConnectionString <=> NEW.ConnectionString) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ConnectionString', OriginalValue = OLD.ConnectionString, NewValue = NEW.ConnectionString, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DataChannel <=> NEW.DataChannel) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DataChannel', OriginalValue = OLD.DataChannel, NewValue = NEW.DataChannel, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CommandChannel <=> NEW.CommandChannel) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CommandChannel', OriginalValue = OLD.CommandChannel, NewValue = NEW.CommandChannel, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.IDCode <=> NEW.IDCode) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IDCode', OriginalValue = OLD.IDCode, NewValue = NEW.IDCode, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AutoPublishConfigFrame <=> NEW.AutoPublishConfigFrame) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AutoPublishConfigFrame', OriginalValue = OLD.AutoPublishConfigFrame, NewValue = NEW.AutoPublishConfigFrame, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AutoStartDataChannel <=> NEW.AutoStartDataChannel) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AutoStartDataChannel', OriginalValue = OLD.AutoStartDataChannel, NewValue = NEW.AutoStartDataChannel, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.NominalFrequency <=> NEW.NominalFrequency) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NominalFrequency', OriginalValue = OLD.NominalFrequency, NewValue = NEW.NominalFrequency, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.FramesPerSecond <=> NEW.FramesPerSecond) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FramesPerSecond', OriginalValue = OLD.FramesPerSecond, NewValue = NEW.FramesPerSecond, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LagTime <=> NEW.LagTime) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LagTime', OriginalValue = OLD.LagTime, NewValue = NEW.LagTime, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LeadTime <=> NEW.LeadTime) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LeadTime', OriginalValue = OLD.LeadTime, NewValue = NEW.LeadTime, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UseLocalClockAsRealTime <=> NEW.UseLocalClockAsRealTime) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UseLocalClockAsRealTime', OriginalValue = OLD.UseLocalClockAsRealTime, NewValue = NEW.UseLocalClockAsRealTime, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AllowSortsByArrival <=> NEW.AllowSortsByArrival) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowSortsByArrival', OriginalValue = OLD.AllowSortsByArrival, NewValue = NEW.AllowSortsByArrival, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.IgnoreBadTimestamps <=> NEW.IgnoreBadTimestamps) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IgnoreBadTimestamps', OriginalValue = OLD.IgnoreBadTimestamps, NewValue = NEW.IgnoreBadTimestamps, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.TimeResolution <=> NEW.TimeResolution) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TimeResolution', OriginalValue = OLD.TimeResolution, NewValue = NEW.TimeResolution, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AllowPreemptivePublishing <=> NEW.AllowPreemptivePublishing) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AllowPreemptivePublishing', OriginalValue = OLD.AllowPreemptivePublishing, NewValue = NEW.AllowPreemptivePublishing, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.PerformTimeReasonabilityCheck <=> NEW.PerformTimeReasonabilityCheck) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PerformTimeReasonabilityCheck', OriginalValue = OLD.PerformTimeReasonabilityCheck, NewValue = NEW.PerformTimeReasonabilityCheck, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DownsamplingMethod <=> NEW.DownsamplingMethod) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DownsamplingMethod', OriginalValue = OLD.DownsamplingMethod, NewValue = NEW.DownsamplingMethod, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DataFormat <=> NEW.DataFormat) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DataFormat', OriginalValue = OLD.DataFormat, NewValue = NEW.DataFormat, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CoordinateFormat <=> NEW.CoordinateFormat) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CoordinateFormat', OriginalValue = OLD.CoordinateFormat, NewValue = NEW.CoordinateFormat, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CurrentScalingValue <=> NEW.CurrentScalingValue) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CurrentScalingValue', OriginalValue = OLD.CurrentScalingValue, NewValue = NEW.CurrentScalingValue, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.VoltageScalingValue <=> NEW.VoltageScalingValue) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'VoltageScalingValue', OriginalValue = OLD.VoltageScalingValue, NewValue = NEW.VoltageScalingValue, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AnalogScalingValue <=> NEW.AnalogScalingValue) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AnalogScalingValue', OriginalValue = OLD.AnalogScalingValue, NewValue = NEW.AnalogScalingValue, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DigitalMaskValue <=> NEW.DigitalMaskValue) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DigitalMaskValue', OriginalValue = OLD.DigitalMaskValue, NewValue = NEW.DigitalMaskValue, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStream', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trail for deletes on OutputStream: writes one AuditLog row per column of the
-- deleted record, preserving the last value (NewValue is NULL, Deleted flag set).
-- UpdatedBy comes from the session variable @context set by the application layer.
CREATE TRIGGER OutputStream_AuditDelete AFTER DELETE ON OutputStream
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'NodeID', OLD.NodeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'Acronym', OLD.Acronym, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'Name', OLD.Name, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'Type', OLD.Type, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'ConnectionString', OLD.ConnectionString, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'DataChannel', OLD.DataChannel, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'CommandChannel', OLD.CommandChannel, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'IDCode', OLD.IDCode, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'AutoPublishConfigFrame', OLD.AutoPublishConfigFrame, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'AutoStartDataChannel', OLD.AutoStartDataChannel, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'NominalFrequency', OLD.NominalFrequency, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'FramesPerSecond', OLD.FramesPerSecond, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'LagTime', OLD.LagTime, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'LeadTime', OLD.LeadTime, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'UseLocalClockAsRealTime', OLD.UseLocalClockAsRealTime, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'AllowSortsByArrival', OLD.AllowSortsByArrival, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'IgnoreBadTimestamps', OLD.IgnoreBadTimestamps, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'TimeResolution', OLD.TimeResolution, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'AllowPreemptivePublishing', OLD.AllowPreemptivePublishing, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'PerformTimeReasonabilityCheck', OLD.PerformTimeReasonabilityCheck, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'DownsamplingMethod', OLD.DownsamplingMethod, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'DataFormat', OLD.DataFormat, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'CoordinateFormat', OLD.CoordinateFormat, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'CurrentScalingValue', OLD.CurrentScalingValue, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'VoltageScalingValue', OLD.VoltageScalingValue, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'AnalogScalingValue', OLD.AnalogScalingValue, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'DigitalMaskValue', OLD.DigitalMaskValue, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'Enabled', OLD.Enabled, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStream', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for updates on OutputStreamDevice: one AuditLog row per changed column.
-- FIX: the original used "OLD.x != NEW.x", which evaluates to NULL (not TRUE) when
-- either operand is NULL, so changes from NULL to a value (or back) on nullable
-- columns were silently skipped. The NULL-safe equality operator <=> always yields
-- TRUE/FALSE, and is identical to "=" for NOT NULL columns, so this is safe for all.
CREATE TRIGGER OutputStreamDevice_AuditUpdate AFTER UPDATE ON OutputStreamDevice
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AdapterID <=> NEW.AdapterID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterID', OriginalValue = OLD.AdapterID, NewValue = NEW.AdapterID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.IDCode <=> NEW.IDCode) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'IDCode', OriginalValue = OLD.IDCode, NewValue = NEW.IDCode, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.BpaAcronym <=> NEW.BpaAcronym) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'BpaAcronym', OriginalValue = OLD.BpaAcronym, NewValue = NEW.BpaAcronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.PhasorDataFormat <=> NEW.PhasorDataFormat) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PhasorDataFormat', OriginalValue = OLD.PhasorDataFormat, NewValue = NEW.PhasorDataFormat, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.FrequencyDataFormat <=> NEW.FrequencyDataFormat) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'FrequencyDataFormat', OriginalValue = OLD.FrequencyDataFormat, NewValue = NEW.FrequencyDataFormat, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AnalogDataFormat <=> NEW.AnalogDataFormat) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AnalogDataFormat', OriginalValue = OLD.AnalogDataFormat, NewValue = NEW.AnalogDataFormat, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CoordinateFormat <=> NEW.CoordinateFormat) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CoordinateFormat', OriginalValue = OLD.CoordinateFormat, NewValue = NEW.CoordinateFormat, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trail for deletes on OutputStreamDevice: one AuditLog row per column of the
-- deleted record (NewValue NULL, Deleted flag set, UpdatedBy from session @context).
CREATE TRIGGER OutputStreamDevice_AuditDelete AFTER DELETE ON OutputStreamDevice
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'NodeID', OLD.NodeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'AdapterID', OLD.AdapterID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'IDCode', OLD.IDCode, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'Acronym', OLD.Acronym, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'BpaAcronym', OLD.BpaAcronym, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'Name', OLD.Name, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'PhasorDataFormat', OLD.PhasorDataFormat, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'FrequencyDataFormat', OLD.FrequencyDataFormat, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'AnalogDataFormat', OLD.AnalogDataFormat, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'CoordinateFormat', OLD.CoordinateFormat, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'Enabled', OLD.Enabled, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevice', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for updates on OutputStreamDeviceAnalog: one AuditLog row per changed column.
-- FIX: "OLD.x != NEW.x" yields NULL (not TRUE) when either side is NULL, so NULL-to-value
-- transitions on nullable columns went unaudited. The NULL-safe <=> operator always
-- yields TRUE/FALSE and matches "=" exactly for NOT NULL columns.
CREATE TRIGGER OutputStreamDeviceAnalog_AuditUpdate AFTER UPDATE ON OutputStreamDeviceAnalog
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.OutputStreamDeviceID <=> NEW.OutputStreamDeviceID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'OutputStreamDeviceID', OriginalValue = OLD.OutputStreamDeviceID, NewValue = NEW.OutputStreamDeviceID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Label <=> NEW.Label) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Label', OriginalValue = OLD.Label, NewValue = NEW.Label, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Type <=> NEW.Type) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Type', OriginalValue = OLD.Type, NewValue = NEW.Type, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ScalingValue <=> NEW.ScalingValue) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ScalingValue', OriginalValue = OLD.ScalingValue, NewValue = NEW.ScalingValue, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceAnalog', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trail for deletes on OutputStreamDeviceAnalog: one AuditLog row per column of
-- the deleted record (NewValue NULL, Deleted flag set, UpdatedBy from session @context).
CREATE TRIGGER OutputStreamDeviceAnalog_AuditDelete AFTER DELETE ON OutputStreamDeviceAnalog
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'NodeID', OLD.NodeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'OutputStreamDeviceID', OLD.OutputStreamDeviceID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'Label', OLD.Label, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'Type', OLD.Type, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'ScalingValue', OLD.ScalingValue, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceAnalog', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for updates on OutputStreamDeviceDigital: one AuditLog row per changed column.
-- FIX: "OLD.x != NEW.x" yields NULL (not TRUE) when either side is NULL, so NULL-to-value
-- transitions on nullable columns went unaudited. The NULL-safe <=> operator always
-- yields TRUE/FALSE and matches "=" exactly for NOT NULL columns.
CREATE TRIGGER OutputStreamDeviceDigital_AuditUpdate AFTER UPDATE ON OutputStreamDeviceDigital
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.OutputStreamDeviceID <=> NEW.OutputStreamDeviceID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'OutputStreamDeviceID', OriginalValue = OLD.OutputStreamDeviceID, NewValue = NEW.OutputStreamDeviceID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Label <=> NEW.Label) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Label', OriginalValue = OLD.Label, NewValue = NEW.Label, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.MaskValue <=> NEW.MaskValue) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'MaskValue', OriginalValue = OLD.MaskValue, NewValue = NEW.MaskValue, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDeviceDigital', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trail for deletes on OutputStreamDeviceDigital: one AuditLog row per column of
-- the deleted record (NewValue NULL, Deleted flag set, UpdatedBy from session @context).
CREATE TRIGGER OutputStreamDeviceDigital_AuditDelete AFTER DELETE ON OutputStreamDeviceDigital
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'NodeID', OLD.NodeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'OutputStreamDeviceID', OLD.OutputStreamDeviceID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'Label', OLD.Label, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'MaskValue', OLD.MaskValue, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDeviceDigital', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for updates on OutputStreamDevicePhasor: one AuditLog row per changed column.
-- FIX: "OLD.x != NEW.x" yields NULL (not TRUE) when either side is NULL, so NULL-to-value
-- transitions on nullable columns went unaudited. The NULL-safe <=> operator always
-- yields TRUE/FALSE and matches "=" exactly for NOT NULL columns.
CREATE TRIGGER OutputStreamDevicePhasor_AuditUpdate AFTER UPDATE ON OutputStreamDevicePhasor
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.OutputStreamDeviceID <=> NEW.OutputStreamDeviceID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'OutputStreamDeviceID', OriginalValue = OLD.OutputStreamDeviceID, NewValue = NEW.OutputStreamDeviceID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Label <=> NEW.Label) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Label', OriginalValue = OLD.Label, NewValue = NEW.Label, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Type <=> NEW.Type) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Type', OriginalValue = OLD.Type, NewValue = NEW.Type, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Phase <=> NEW.Phase) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Phase', OriginalValue = OLD.Phase, NewValue = NEW.Phase, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ScalingValue <=> NEW.ScalingValue) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ScalingValue', OriginalValue = OLD.ScalingValue, NewValue = NEW.ScalingValue, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamDevicePhasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Audit trail for deletes on OutputStreamDevicePhasor: one AuditLog row per column of
-- the deleted record (NewValue NULL, Deleted flag set, UpdatedBy from session @context).
CREATE TRIGGER OutputStreamDevicePhasor_AuditDelete AFTER DELETE ON OutputStreamDevicePhasor
FOR EACH ROW BEGIN
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'NodeID', OLD.NodeID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'OutputStreamDeviceID', OLD.OutputStreamDeviceID, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'Label', OLD.Label, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'Type', OLD.Type, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'Phase', OLD.Phase, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'ScalingValue', OLD.ScalingValue, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'LoadOrder', OLD.LoadOrder, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'CreatedOn', OLD.CreatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'CreatedBy', OLD.CreatedBy, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'UpdatedOn', OLD.UpdatedOn, NULL, '1', @context);
    INSERT INTO AuditLog (TableName, PrimaryKeyColumn, PrimaryKeyValue, ColumnName, OriginalValue, NewValue, Deleted, UpdatedBy) VALUES ('OutputStreamDevicePhasor', 'ID', OLD.ID, 'UpdatedBy', OLD.UpdatedBy, NULL, '1', @context);
END$$
-- Audit trail for updates on OutputStreamMeasurement: one AuditLog row per changed column.
-- FIX: "OLD.x != NEW.x" yields NULL (not TRUE) when either side is NULL, so transitions
-- involving NULL on nullable columns (e.g. HistorianID) went unaudited. The NULL-safe
-- <=> operator always yields TRUE/FALSE and matches "=" exactly for NOT NULL columns.
CREATE TRIGGER OutputStreamMeasurement_AuditUpdate AFTER UPDATE ON OutputStreamMeasurement
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AdapterID <=> NEW.AdapterID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterID', OriginalValue = OLD.AdapterID, NewValue = NEW.AdapterID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.HistorianID <=> NEW.HistorianID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'HistorianID', OriginalValue = OLD.HistorianID, NewValue = NEW.HistorianID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.PointID <=> NEW.PointID) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PointID', OriginalValue = OLD.PointID, NewValue = NEW.PointID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.SignalReference <=> NEW.SignalReference) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SignalReference', OriginalValue = OLD.SignalReference, NewValue = NEW.SignalReference, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Delete-audit trigger for OutputStreamMeasurement: after a row is removed, snapshot
-- each column's final value into AuditLog with Deleted = '1' and NewValue = NULL.
-- UpdatedBy comes from the @context session variable — presumably set by the
-- application before issuing the DELETE; verify against the calling code.
CREATE TRIGGER OutputStreamMeasurement_AuditDelete AFTER DELETE ON OutputStreamMeasurement
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AdapterID', OriginalValue = OLD.AdapterID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'HistorianID', OriginalValue = OLD.HistorianID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PointID', OriginalValue = OLD.PointID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SignalReference', OriginalValue = OLD.SignalReference, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'OutputStreamMeasurement', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Change-audit trigger for Phasor: logs one AuditLog row per changed column on UPDATE.
-- NULL-safe equality (<=>) replaces != so that transitions to or from NULL are audited
-- (notably DestinationPhasorID being set or cleared); "OLD.x != NEW.x" yields NULL,
-- not TRUE, when either operand is NULL, so the IF body was skipped for such changes.
CREATE TRIGGER Phasor_AuditUpdate AFTER UPDATE ON Phasor
FOR EACH ROW BEGIN
    IF NOT (OLD.DeviceID <=> NEW.DeviceID) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DeviceID', OriginalValue = OLD.DeviceID, NewValue = NEW.DeviceID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Label <=> NEW.Label) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Label', OriginalValue = OLD.Label, NewValue = NEW.Label, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Type <=> NEW.Type) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Type', OriginalValue = OLD.Type, NewValue = NEW.Type, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Phase <=> NEW.Phase) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Phase', OriginalValue = OLD.Phase, NewValue = NEW.Phase, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.DestinationPhasorID <=> NEW.DestinationPhasorID) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DestinationPhasorID', OriginalValue = OLD.DestinationPhasorID, NewValue = NEW.DestinationPhasorID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.SourceIndex <=> NEW.SourceIndex) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SourceIndex', OriginalValue = OLD.SourceIndex, NewValue = NEW.SourceIndex, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Delete-audit trigger for Phasor: snapshots each column of the deleted row into
-- AuditLog (Deleted = '1', NewValue = NULL). UpdatedBy is taken from the @context
-- session variable — presumably set by the application before the DELETE; verify.
CREATE TRIGGER Phasor_AuditDelete AFTER DELETE ON Phasor
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DeviceID', OriginalValue = OLD.DeviceID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Label', OriginalValue = OLD.Label, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Type', OriginalValue = OLD.Type, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Phase', OriginalValue = OLD.Phase, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'DestinationPhasorID', OriginalValue = OLD.DestinationPhasorID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SourceIndex', OriginalValue = OLD.SourceIndex, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Phasor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Change-audit trigger for Alarm: logs one AuditLog row per changed column on UPDATE.
-- Two fixes versus the original: (1) NULL-safe equality (<=>) replaces !=, since
-- "OLD.x != NEW.x" is NULL (not TRUE) when either side is NULL and changes to/from
-- NULL (e.g. AssociatedMeasurementID, SetPoint) were silently unaudited; (2) the
-- column alias "PrimaryKeyVAlue" is normalized to "PrimaryKeyValue" to match every
-- other trigger in this script (behavior is identical — MySQL column names are
-- case-insensitive — this is purely a consistency fix).
CREATE TRIGGER Alarm_AuditUpdate AFTER UPDATE ON Alarm
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.TagName <=> NEW.TagName) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TagName', OriginalValue = OLD.TagName, NewValue = NEW.TagName, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.SignalID <=> NEW.SignalID) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SignalID', OriginalValue = OLD.SignalID, NewValue = NEW.SignalID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AssociatedMeasurementID <=> NEW.AssociatedMeasurementID) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssociatedMeasurementID', OriginalValue = OLD.AssociatedMeasurementID, NewValue = NEW.AssociatedMeasurementID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Description <=> NEW.Description) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Severity <=> NEW.Severity) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Severity', OriginalValue = OLD.Severity, NewValue = NEW.Severity, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Operation <=> NEW.Operation) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Operation', OriginalValue = OLD.Operation, NewValue = NEW.Operation, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.SetPoint <=> NEW.SetPoint) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SetPoint', OriginalValue = OLD.SetPoint, NewValue = NEW.SetPoint, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Tolerance <=> NEW.Tolerance) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Tolerance', OriginalValue = OLD.Tolerance, NewValue = NEW.Tolerance, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Delay <=> NEW.Delay) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Delay', OriginalValue = OLD.Delay, NewValue = NEW.Delay, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Hysteresis <=> NEW.Hysteresis) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Hysteresis', OriginalValue = OLD.Hysteresis, NewValue = NEW.Hysteresis, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.LoadOrder <=> NEW.LoadOrder) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NEW.LoadOrder, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Delete-audit trigger for Alarm: snapshots each column of the deleted row into
-- AuditLog (Deleted = '1', NewValue = NULL); UpdatedBy comes from the @context session
-- variable. Consistency fix: "PrimaryKeyVAlue" normalized to "PrimaryKeyValue" to
-- match every other trigger in this script (behavior identical — MySQL column names
-- are case-insensitive).
CREATE TRIGGER Alarm_AuditDelete AFTER DELETE ON Alarm
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'TagName', OriginalValue = OLD.TagName, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SignalID', OriginalValue = OLD.SignalID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AssociatedMeasurementID', OriginalValue = OLD.AssociatedMeasurementID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Severity', OriginalValue = OLD.Severity, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Operation', OriginalValue = OLD.Operation, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SetPoint', OriginalValue = OLD.SetPoint, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Tolerance', OriginalValue = OLD.Tolerance, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Delay', OriginalValue = OLD.Delay, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Hysteresis', OriginalValue = OLD.Hysteresis, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'LoadOrder', OriginalValue = OLD.LoadOrder, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Alarm', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Change-audit trigger for Vendor: logs one AuditLog row per changed column on UPDATE.
-- NULL-safe equality (<=>) replaces != so changes to/from NULL (e.g. PhoneNumber,
-- ContactEmail, Url being set or cleared) are audited; a plain != is NULL, not TRUE,
-- when either operand is NULL and the change was silently skipped.
CREATE TRIGGER Vendor_AuditUpdate AFTER UPDATE ON Vendor
FOR EACH ROW BEGIN
    IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.PhoneNumber <=> NEW.PhoneNumber) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PhoneNumber', OriginalValue = OLD.PhoneNumber, NewValue = NEW.PhoneNumber, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ContactEmail <=> NEW.ContactEmail) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ContactEmail', OriginalValue = OLD.ContactEmail, NewValue = NEW.ContactEmail, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Url <=> NEW.Url) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Url', OriginalValue = OLD.Url, NewValue = NEW.Url, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Delete-audit trigger for Vendor: snapshots each column of the deleted row into
-- AuditLog (Deleted = '1', NewValue = NULL). UpdatedBy is read from the @context
-- session variable — presumably set by the application before the DELETE; verify.
CREATE TRIGGER Vendor_AuditDelete AFTER DELETE ON Vendor
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'PhoneNumber', OriginalValue = OLD.PhoneNumber, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ContactEmail', OriginalValue = OLD.ContactEmail, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Url', OriginalValue = OLD.Url, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Vendor', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Change-audit trigger for VendorDevice: logs one AuditLog row per changed column on
-- UPDATE. NULL-safe equality (<=>) replaces != so changes to/from NULL (e.g.
-- Description or Url being set or cleared) are audited; "OLD.x != NEW.x" is NULL,
-- not TRUE, when either operand is NULL and the change was silently skipped.
CREATE TRIGGER VendorDevice_AuditUpdate AFTER UPDATE ON VendorDevice
FOR EACH ROW BEGIN
    IF NOT (OLD.VendorID <=> NEW.VendorID) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'VendorID', OriginalValue = OLD.VendorID, NewValue = NEW.VendorID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Description <=> NEW.Description) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Url <=> NEW.Url) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Url', OriginalValue = OLD.Url, NewValue = NEW.Url, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Delete-audit trigger for VendorDevice: snapshots each column of the deleted row
-- into AuditLog (Deleted = '1', NewValue = NULL). UpdatedBy is read from the @context
-- session variable — presumably set by the application before the DELETE; verify.
CREATE TRIGGER VendorDevice_AuditDelete AFTER DELETE ON VendorDevice
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'VendorID', OriginalValue = OLD.VendorID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Url', OriginalValue = OLD.Url, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'VendorDevice', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Change-audit trigger for MeasurementGroup: logs one AuditLog row per changed column
-- on UPDATE. NULL-safe equality (<=>) replaces != so changes to/from NULL (e.g.
-- Description being set or cleared) are audited; "OLD.x != NEW.x" is NULL, not TRUE,
-- when either operand is NULL and the change was silently skipped.
CREATE TRIGGER MeasurementGroup_AuditUpdate AFTER UPDATE ON MeasurementGroup
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Description <=> NEW.Description) THEN
        INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NEW.Description, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Delete-audit trigger for MeasurementGroup: snapshots each column of the deleted row
-- into AuditLog (Deleted = '1', NewValue = NULL). UpdatedBy is read from the @context
-- session variable — presumably set by the application before the DELETE; verify.
CREATE TRIGGER MeasurementGroup_AuditDelete AFTER DELETE ON MeasurementGroup
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Description', OriginalValue = OLD.Description, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'MeasurementGroup', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
-- Change-audit trigger for Subscriber: logs one AuditLog row per changed column on
-- UPDATE. NULL-safe equality (<=>) replaces != so changes to/from NULL (e.g.
-- SharedSecret, AuthKey or ValidIPAddresses being set or cleared) are audited;
-- "OLD.x != NEW.x" is NULL, not TRUE, when either operand is NULL and such changes
-- were silently skipped.
CREATE TRIGGER Subscriber_AuditUpdate AFTER UPDATE ON Subscriber
FOR EACH ROW BEGIN
    IF NOT (OLD.NodeID <=> NEW.NodeID) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NEW.NodeID, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Acronym <=> NEW.Acronym) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NEW.Acronym, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Name <=> NEW.Name) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NEW.Name, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.SharedSecret <=> NEW.SharedSecret) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SharedSecret', OriginalValue = OLD.SharedSecret, NewValue = NEW.SharedSecret, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.AuthKey <=> NEW.AuthKey) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AuthKey', OriginalValue = OLD.AuthKey, NewValue = NEW.AuthKey, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.ValidIPAddresses <=> NEW.ValidIPAddresses) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ValidIPAddresses', OriginalValue = OLD.ValidIPAddresses, NewValue = NEW.ValidIPAddresses, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.Enabled <=> NEW.Enabled) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NEW.Enabled, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedOn <=> NEW.UpdatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NEW.UpdatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.UpdatedBy <=> NEW.UpdatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NEW.UpdatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedBy <=> NEW.CreatedBy) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NEW.CreatedBy, UpdatedBy = NEW.UpdatedBy;
    END IF;
    IF NOT (OLD.CreatedOn <=> NEW.CreatedOn) THEN
        INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NEW.CreatedOn, UpdatedBy = NEW.UpdatedBy;
    END IF;
END$$
-- Delete-audit trigger for Subscriber: snapshots each column of the deleted row into
-- AuditLog (Deleted = '1', NewValue = NULL). UpdatedBy is read from the @context
-- session variable — presumably set by the application before the DELETE; verify.
CREATE TRIGGER Subscriber_AuditDelete AFTER DELETE ON Subscriber
FOR EACH ROW BEGIN
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'NodeID', OriginalValue = OLD.NodeID, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Acronym', OriginalValue = OLD.Acronym, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Name', OriginalValue = OLD.Name, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'SharedSecret', OriginalValue = OLD.SharedSecret, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'AuthKey', OriginalValue = OLD.AuthKey, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'ValidIPAddresses', OriginalValue = OLD.ValidIPAddresses, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'Enabled', OriginalValue = OLD.Enabled, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedOn', OriginalValue = OLD.UpdatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'UpdatedBy', OriginalValue = OLD.UpdatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedBy', OriginalValue = OLD.CreatedBy, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
    INSERT INTO AuditLog SET TableName = 'Subscriber', PrimaryKeyColumn = 'ID', PrimaryKeyValue = OLD.ID, ColumnName = 'CreatedOn', OriginalValue = OLD.CreatedOn, NewValue = NULL, Deleted = '1', UpdatedBy = @context;
END$$
DELIMITER ;
| {
"content_hash": "a73a3e9435976be05aa74136b0b560e2",
"timestamp": "",
"source": "github",
"line_count": 1974,
"max_line_length": 294,
"avg_line_length": 94.23252279635258,
"alnum_prop": 0.7053248393946725,
"repo_name": "rmc00/gsf",
"id": "5abf8d91eb8865e23b10a359e405cd28d4f84414",
"size": "186015",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Source/Libraries/GSF.TimeSeries/Data/MySQL/AuditLog.sql",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "17182"
},
{
"name": "Batchfile",
"bytes": "8635"
},
{
"name": "C",
"bytes": "24478"
},
{
"name": "C#",
"bytes": "30052352"
},
{
"name": "C++",
"bytes": "135084"
},
{
"name": "CMake",
"bytes": "3519"
},
{
"name": "CSS",
"bytes": "4763"
},
{
"name": "HTML",
"bytes": "3914"
},
{
"name": "Java",
"bytes": "955418"
},
{
"name": "JavaScript",
"bytes": "1274735"
},
{
"name": "Objective-C",
"bytes": "173"
},
{
"name": "PLSQL",
"bytes": "107859"
},
{
"name": "PLpgSQL",
"bytes": "88792"
},
{
"name": "Pascal",
"bytes": "515"
},
{
"name": "SQLPL",
"bytes": "186015"
},
{
"name": "ShaderLab",
"bytes": "137"
},
{
"name": "Shell",
"bytes": "11035"
},
{
"name": "Smalltalk",
"bytes": "8510"
},
{
"name": "Visual Basic",
"bytes": "72875"
},
{
"name": "XSLT",
"bytes": "1070"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages


def _long_description():
    """Read the package's long description from README.rst."""
    with open('README.rst') as readme:
        return readme.read()


# Package metadata for the Rovi API client; see README.rst for usage.
setup(
    name='roviclient',
    version='0.1.0',
    description='A simple Python client library for the Rovi API',
    long_description=_long_description(),
    author='Devin Sevilla',
    author_email='dasevilla@gmail.com',
    url='https://github.com/dasevilla/rovi-python',
    download_url='https://github.com/dasevilla/rovi-python/tarball/master',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
    ],
    install_requires=[
        'requests>=1.2.0',
    ],
    packages=find_packages(),
)
| {
"content_hash": "6c3b55d622f74ae34f1b33a00a4ff887",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 75,
"avg_line_length": 22.71875,
"alnum_prop": 0.6354883081155434,
"repo_name": "dasevilla/rovi-python",
"id": "13341c05daa2811fcd17641f8c6c779cd92ce0ab",
"size": "727",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12750"
}
],
"symlink_target": ""
} |
package org.sagebionetworks.web.server.servlet;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
public class ServiceUtils {
public static void writeToFile(File temp, InputStream stream, final long maxAttachmentSizeBytes) throws IOException {
BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(temp, false));
try {
long size = 0;
byte[] buffer = new byte[1024];
int length = 0;
while ((length = stream.read(buffer)) > 0) {
out.write(buffer, 0, length);
size += length;
if (size > maxAttachmentSizeBytes)
throw new IllegalArgumentException("File size exceeds the limit of " + maxAttachmentSizeBytes + " MB for attachments");
}
} catch (Throwable e) {
// if is any errors delete the tmp file
if (out != null) {
out.close();
}
temp.delete();
throw new RuntimeException(e);
} finally {
if (out != null) {
out.close();
}
}
}
/**
* Write the data in the passed input stream to a temp file.
*
* @param stream
* @return
* @throws IOException
*/
public static File writeToTempFile(InputStream stream, final long maxAttachmentSizeBytes) throws IOException {
File temp = File.createTempFile("tempUploadedFile", ".tmp");
writeToFile(temp, stream, maxAttachmentSizeBytes);
return temp;
}
}
| {
"content_hash": "5c19d5b0e76a2906cad3031f6b9bd618",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 124,
"avg_line_length": 29.0625,
"alnum_prop": 0.7010752688172043,
"repo_name": "jay-hodgson/SynapseWebClient",
"id": "472e0d1c02f5a13e06bf79a2a54a27e08733f1ce",
"size": "1395",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/main/java/org/sagebionetworks/web/server/servlet/ServiceUtils.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "392172"
},
{
"name": "HTML",
"bytes": "3785996"
},
{
"name": "Java",
"bytes": "7951622"
},
{
"name": "JavaScript",
"bytes": "8787050"
},
{
"name": "SCSS",
"bytes": "69093"
},
{
"name": "Shell",
"bytes": "1363"
}
],
"symlink_target": ""
} |
package net.kados.gtp.app.controllers.modules;
import java.net.URL;
import java.util.ResourceBundle;
import javafx.beans.value.ObservableValue;
import javafx.concurrent.WorkerStateEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.ProgressIndicator;
import javafx.scene.control.TextArea;
import net.kados.gtp.app.libs.Teryt.Parser;
import net.kados.gtp.core.SceneManager.Interfaces.SceneManagerShowable;
import net.kados.gtp.core.SceneManager.SceneManager;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Controller for the parsing screen: wires the long-running Teryt {@link Parser}
 * task to the progress UI and starts it on a background thread.
 */
public class ParsingController implements Initializable, SceneManagerShowable
{
    // Used to swap in the "summary" part of the scene once parsing succeeds.
    @Autowired
    private SceneManager sceneManager;

    // Background task that performs the actual Teryt data parsing.
    @Autowired
    private Parser parser;

    // Reflects the parser's progress property.
    @FXML
    private ProgressIndicator progressIndicator;

    // Accumulates the parser's status messages, one per line.
    @FXML
    private TextArea progressPrompt;

    /** No-op: all wiring happens in {@link #showed()} once the scene is shown. */
    @Override
    public void initialize(URL location, ResourceBundle resources)
    {
    }

    /**
     * Called when this scene is shown. Subscribes the UI to the parser's
     * message and progress properties, registers a success handler that
     * navigates to the summary view, and starts the parser on a background
     * thread.
     */
    @Override
    public void showed()
    {
        // Append each new status message from the parser to the log area.
        this.parser.messageProperty().addListener((ObservableValue<? extends String> ov, String prv, String nxt) -> {
            this.progressPrompt.appendText(nxt + "\n");
        });
        // Mirror the parser's progress value in the indicator.
        this.parser.progressProperty().addListener((ObservableValue<? extends Number> ov, Number prv, Number nxt) -> {
            this.progressIndicator.setProgress((double) nxt);
        });
        // When the task finishes successfully, show the summary part.
        this.parser.addEventHandler(WorkerStateEvent.WORKER_STATE_SUCCEEDED, (EventHandler<WorkerStateEvent>) (WorkerStateEvent event) -> {
            sceneManager.nestPartInScene("summary");
        });
        // Non-daemon so the JVM stays alive until parsing completes.
        Thread thread = new Thread(this.parser);
        thread.setDaemon(false);
        thread.start();
    }
}
"content_hash": "b6cbd807ee30a78b50d8977ee813d685",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 139,
"avg_line_length": 31.785714285714285,
"alnum_prop": 0.7067415730337079,
"repo_name": "Digital87/GUS-Teryt-Parser",
"id": "6e897e0d0850bec2fcf00ac230a794f90d2ab353",
"size": "1780",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/net/kados/gtp/app/controllers/modules/ParsingController.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "235"
},
{
"name": "Java",
"bytes": "58279"
}
],
"symlink_target": ""
} |
#ifndef EASYIMAGE_H_
#define EASYIMAGE_H_
#include <stdint.h>
#include <vector>
#include <iostream>
/**
* \brief The namespace of the EasyImage class
*/
namespace img
{
/**
* \brief This class represents the color of a pixel in an img::EasyImage object
*/
class Color
{
//a safety warning to all of you: Do *NOT* rearrange the 'color components' in this class
//easyimage expects these three fields to be the *first* fields in the class AND expects
//them to be in *this* order
//if you alter the arrangement, the generated BMP files will contain garbage
public:
/**
* \brief The intensity of the blue color component
*/
uint8_t blue;
/**
* \brief The intensity of the green color component
*/
uint8_t green;
/**
* \brief The intensity of the red color component
*/
uint8_t red;
/**
* \brief Default Constructor
*/
Color();
/**
* \brief Constructs a Color with the given intensities
*
* \param r The red color component
* \param g The green color component
* \param b The blue color component
*
*/
Color(uint8_t r, uint8_t g, uint8_t b);
/**
* Destructor
*/
~Color();
};
/**
* \brief The exception that is thrown when an error occurs while trying to read an img::EasyImage from an input stream
*/
	class UnsupportedFileTypeException: public std::exception
	{
		private:
			/**
			 * \brief Message explaining what went wrong
			 */
			std::string message;
		public:
			/**
			 * \brief Construct an exception with the given message
			 *
			 * \param msg The message explaining what went wrong
			 *
			 */
			UnsupportedFileTypeException(std::string const& msg);
			/**
			 * \brief Copy Constructor
			 *
			 * \param original The exception to be copied into this object
			 */
			UnsupportedFileTypeException(const UnsupportedFileTypeException &original);
			/**
			 * \brief Destructor
			 */
			virtual ~UnsupportedFileTypeException() throw ();
			/**
			 * \brief Assignment operator
			 *
			 * \param original The original exception to be assigned to this one
			 */
			UnsupportedFileTypeException& operator=(const UnsupportedFileTypeException &original);
			/**
			 * \brief Returns a description of the error that occurred.
			 *
			 * \return A description of the error that occurred.
			 */
			virtual const char *what() const throw ();
	};
/**
* \brief This class implements a 'minor' image-library that supports basic operations such as setting and retrieving a pixel, and drawing a line.
*/
class EasyImage
{
public:
/**
* \brief Default Constructor. Creates a zero-pixel image
*/
EasyImage();
/**
* \brief Constructor: creates a new EasyImage of the specified width and height
*
* \param width the width of the image
* \param height the height of the image
* \param color (optional) the background color of the image
*/
EasyImage(unsigned int width, unsigned int height, Color color = Color());
/**
* \brief Copy Constructor
*
* \param img the image to be copied
*/
EasyImage(EasyImage const& img);
/**
* \brief Destructor
*/
virtual ~EasyImage();
/**
* \brief Assignment operator. Allows an easyImage to be assigned to another easyImage
*
* \param img The image to be assigned to this image
*/
EasyImage& operator=(EasyImage const& img);
/**
* \brief Returns the width of the image
*
* \return the width of the image
*/
unsigned int get_width() const;
/**
* \brief Returns the height of the image
* \return the height of the image
*/
unsigned int get_height() const;
/**
* \brief Function operator. This operator returns a reference to a particular pixel of the image.
*
* \param x the x coordinate of the pixel
* \param y the y coordinate of the pixel
*
* These assertions apply:
* assert(x>=0 && x < getWidth())
* assert(y>=0 && y < getHeight())
*/
Color& operator()(unsigned int x, unsigned int y);
/**
* \brief Function operator. This operator returns a const reference to a particular pixel of the image.
*
* \param x the x coordinate of the pixel
* \param y the y coordinate of the pixel
*
* These assertions apply:
* assert(x>=0 && x < getWidth())
* assert(y>=0 && y < getHeight())
*/
Color const& operator()(unsigned int x, unsigned int y) const;
/**
* \brief Fills the image with a background of a specified color. Defaults to black
*
* \param color The color to be assigned to each pixel
*/
void clear(Color color = Color());
/**
* \brief Draws a line from pixel (x0,y0) to pixel (x1,y1) in the specified color
*
* \param x0 the x coordinate of the first pixel
* \param y0 the y coordinate of the first pixel
* \param x1 the x coordinate of the second pixel
* \param y1 the y coordinate of the second pixel
* \param color the color of the line
*
* These assertions apply:
* assert(x0 < getWidth())
* assert(y0 < getHeight())
* assert(x1 < getWidth())
* assert(y1 < getHeight())
*/
void draw_line(unsigned int x0, unsigned int y0, unsigned int x1, unsigned int y1, Color color);
private:
friend std::istream& operator>>(std::istream& in, EasyImage & image);
/**
* \brief the width of the image
*/
unsigned int width;
/**
* \brief the height of the image
*/
unsigned int height;
/**
* \brief the vector containing all pixels
*/
std::vector<Color> bitmap;
};
/**
* \brief Writes an img::EasyImage to an output stream in the BMP file format
*
* \param out the std::ostream to write the BMP file to.
* \param image the img::EasyImage to be written to the output stream
*
* \return a reference to the output stream the image was written to
*/
std::ostream& operator<<(std::ostream& out, EasyImage const& image);
/**
* \brief Reads an img::EasyImage from an input stream.
*
* Please note: at this point only a limited subset of BMP-file format is supported.
* In order to correctly read a BMP file it must:
* - Be an uncompressed bitmap
* - Only contain one plane
* - Use 24bits/pixel
* If the BMP file-format is not supported an UnsupportedFileTypeException is thrown
*
* \param in the input stream to read the bitmap from
* \param image the EasyImage object in which the bitmap must be stored
*
* \return a reference to the input stream from which the bitmap was read
*/
std::istream& operator>>(std::istream& in, EasyImage& image);
}
#endif /* EASYIMAGE_H_ */
| {
"content_hash": "c514c6c4e594a1fcb168830ab4e7a17f",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 147,
"avg_line_length": 27.728,
"alnum_prop": 0.620600115406809,
"repo_name": "timvdm/ComputerGraphics",
"id": "52e4d5b76950c57a247d4d72565bf2e81a953a4e",
"size": "7646",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "libgfx/EasyImage.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1487815"
},
{
"name": "C++",
"bytes": "1242943"
},
{
"name": "Objective-C",
"bytes": "2285"
},
{
"name": "Python",
"bytes": "592"
}
],
"symlink_target": ""
} |
// Copyright 2017 The Cloudprober Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package servers provides an interface to initialize cloudprober servers using servers config.
*/
package servers
import (
	"context"
	"fmt"
	"html/template"

	"github.com/cloudprober/cloudprober/logger"
	"github.com/cloudprober/cloudprober/metrics"
	"github.com/cloudprober/cloudprober/servers/external"
	"github.com/cloudprober/cloudprober/servers/grpc"
	"github.com/cloudprober/cloudprober/servers/http"
	configpb "github.com/cloudprober/cloudprober/servers/proto"
	"github.com/cloudprober/cloudprober/servers/udp"
	"github.com/cloudprober/cloudprober/web/formatutils"
)
// StatusTmpl variable stores the HTML template suitable to generate the
// servers' status for cloudprober's /status page. It expects an array of
// ServerInfo objects as input.
var StatusTmpl = template.Must(template.New("statusTmpl").Parse(`
<table class="status-list">
  <tr>
    <th>Type</th>
    <th>Conf</th>
  </tr>
  {{ range . }}
  <tr>
    <td>{{.Type}}</td>
    <td>
      {{if .Conf}}
      <pre>{{.Conf}}</pre>
      {{else}}
      default
      {{end}}
    </td>
  </tr>
  {{ end }}
</table>
`))

// Server interface has only one method: Start.
type Server interface {
	// Start runs the server, reporting metrics through dataChan.
	Start(ctx context.Context, dataChan chan<- *metrics.EventMetrics) error
}

// ServerInfo encapsulates a Server and related info.
type ServerInfo struct {
	Server
	// Type is the server type name from the config (e.g. "HTTP", "UDP").
	Type string
	// Conf is the server's configuration rendered as text; an empty string
	// means the default configuration (see StatusTmpl).
	Conf string
}
// Init initializes cloudprober servers, based on the provided config.
//
// For each server definition it creates a type-named logger, constructs the
// corresponding server implementation, and records it along with a printable
// form of its configuration. Initialization stops at the first error.
func Init(initCtx context.Context, serverDefs []*configpb.ServerDef) (servers []*ServerInfo, err error) {
	for _, serverDef := range serverDefs {
		var l *logger.Logger
		l, err = logger.NewCloudproberLog(serverDef.GetType().String())
		if err != nil {
			return
		}

		var conf interface{}
		var server Server

		switch serverDef.GetType() {
		case configpb.ServerDef_HTTP:
			server, err = http.New(initCtx, serverDef.GetHttpServer(), l)
			conf = serverDef.GetHttpServer()
		case configpb.ServerDef_UDP:
			server, err = udp.New(initCtx, serverDef.GetUdpServer(), l)
			conf = serverDef.GetUdpServer()
		case configpb.ServerDef_GRPC:
			server, err = grpc.New(initCtx, serverDef.GetGrpcServer(), l)
			conf = serverDef.GetGrpcServer()
		case configpb.ServerDef_EXTERNAL:
			server, err = external.New(initCtx, serverDef.GetExternalServer(), l)
			conf = serverDef.GetExternalServer()
		default:
			// BUGFIX: previously an unrecognized type fell through the switch
			// and appended a ServerInfo with a nil Server, which would panic
			// when Start was called. Fail fast with a clear error instead.
			err = fmt.Errorf("unsupported server type: %s", serverDef.GetType())
		}
		if err != nil {
			return
		}

		servers = append(servers, &ServerInfo{
			Server: server,
			Type:   serverDef.GetType().String(),
			Conf:   formatutils.ConfToString(conf),
		})
	}
	return
}
| {
"content_hash": "fa61933265680e7d3305bc3b4e796bf2",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 105,
"avg_line_length": 28.981308411214954,
"alnum_prop": 0.7158980973879394,
"repo_name": "cloudprober/cloudprober",
"id": "b582b16bd2dfb65d1f2b99afccb4e579d2b1acd8",
"size": "3101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "servers/servers.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1153"
},
{
"name": "CUE",
"bytes": "72086"
},
{
"name": "Dockerfile",
"bytes": "2924"
},
{
"name": "Go",
"bytes": "1061929"
},
{
"name": "JavaScript",
"bytes": "5106"
},
{
"name": "Makefile",
"bytes": "4171"
},
{
"name": "Shell",
"bytes": "8380"
}
],
"symlink_target": ""
} |
package uk.ac.ncl.openlab.intake24.foodsql.shared
import anorm.NamedParameter.symbol
import anorm.{Macro, SQL, SqlParser, sqlToSimple}
import uk.ac.ncl.openlab.intake24.api.data.admin.CategoryHeader
import uk.ac.ncl.openlab.intake24.errors.{LocalLookupError, LookupError}
import uk.ac.ncl.openlab.intake24.foodsql.admin.HeaderRows
import uk.ac.ncl.openlab.intake24.foodsql.{FirstRowValidation, FirstRowValidationClause}
import uk.ac.ncl.openlab.intake24.sql.SqlResourceLoader
/**
  * SQL queries for resolving the full set of categories that a food or a
  * category belongs to (per the `*_all_categories_*` SQL resources).
  *
  * Query text is loaded once from classpath resources; each query runs with
  * first-row validation so that unknown food/category codes (or locales) are
  * reported as lookup errors rather than silently returning empty results.
  */
trait SuperCategoriesQueries extends FirstRowValidation with HeaderRows with SqlResourceLoader {

  // Loaded lazily, once per instance.
  private lazy val foodAllCategoriesCodesQuery = sqlFromResource("shared/food_all_categories_codes_frv.sql")

  /** Returns the codes of all categories the given food belongs to. */
  protected def getFoodAllCategoriesCodesQuery(code: String)(implicit conn: java.sql.Connection): Either[LookupError, Set[String]] = {
    val result = SQL(foodAllCategoriesCodesQuery).on('food_code -> code).executeQuery()
    parseWithFoodValidation(code, result, SqlParser.str("code").+)(Seq(FirstRowValidationClause("code", () => Right(List())))).right.map(_.toSet)
  }

  private lazy val foodAllCategoriesHeadersQuery = sqlFromResource("shared/food_all_categories_headers_frv.sql")

  /** Returns localized headers of all categories the given food belongs to. */
  protected def getFoodAllCategoriesHeadersQuery(code: String, locale: String)(implicit conn: java.sql.Connection): Either[LocalLookupError, Seq[CategoryHeader]] = {
    val result = SQL(foodAllCategoriesHeadersQuery).on('food_code -> code, 'locale_id -> locale).executeQuery()
    parseWithLocaleAndFoodValidation(code, result, Macro.namedParser[CategoryHeaderRow].+)(Seq(FirstRowValidationClause("code", () => Right(List())))).right.map(_.map(_.asCategoryHeader))
  }

  private lazy val categoryAllCategoriesCodesQuery = sqlFromResource("shared/categories_all_categories_codes_frv.sql")

  /** Returns the codes of all categories the given category belongs to. */
  def getCategoryAllCategoriesCodesQuery(code: String)(implicit conn: java.sql.Connection): Either[LookupError, Set[String]] = {
    val result = SQL(categoryAllCategoriesCodesQuery).on('category_code -> code).executeQuery()
    parseWithCategoryValidation(code, result, SqlParser.str("code").+)(Seq(FirstRowValidationClause("code", () => Right(List())))).right.map(_.toSet)
  }

  private lazy val categoryAllCategoriesHeadersQuery = sqlFromResource("shared/categories_all_categories_headers_frv.sql")

  /** Returns localized headers of all categories the given category belongs to. */
  def getCategoryAllCategoriesHeadersQuery(code: String, locale: String)(implicit conn: java.sql.Connection): Either[LocalLookupError, Seq[CategoryHeader]] = {
    val result = SQL(categoryAllCategoriesHeadersQuery).on('category_code -> code, 'locale_id -> locale).executeQuery()
    parseWithLocaleAndCategoryValidation(code, result, Macro.namedParser[CategoryHeaderRow].+)(Seq(FirstRowValidationClause("code", () => Right(List())))).right.map(_.map(_.asCategoryHeader))
  }
}
| {
"content_hash": "163b38a9700ef6e6f27e4e36b1586f9f",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 191,
"avg_line_length": 62.52272727272727,
"alnum_prop": 0.7826245001817521,
"repo_name": "digitalinteraction/intake24",
"id": "c6484a028cc1cba0839da0a983c9dc163b0b741f",
"size": "2751",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "FoodDataSQL/src/main/scala/uk/ac/ncl/openlab/intake24/foodsql/shared/SuperCategoriesQueries.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "773"
},
{
"name": "HTML",
"bytes": "11542"
},
{
"name": "Java",
"bytes": "341144"
},
{
"name": "JavaScript",
"bytes": "1694"
},
{
"name": "Scala",
"bytes": "1314742"
}
],
"symlink_target": ""
} |
<resources>
    <!-- User-visible application name. -->
    <string name="app_name">TestLeanCloud06</string>
</resources>
| {
"content_hash": "059ed2d7955c520efa0af540b94b8c1d",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 52,
"avg_line_length": 26,
"alnum_prop": 0.717948717948718,
"repo_name": "ahjsrhj/EveryDayBuild",
"id": "1301a7aefd21f6225de9e9d7a077c80291dc27d6",
"size": "78",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TestLeanCloud06/app/src/main/res/values/strings.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "442778"
},
{
"name": "Lua",
"bytes": "1712"
}
],
"symlink_target": ""
} |
Metova Mandrill configures ActionMailer to use Mandrill by inspecting `ENV` for your Mandrill configuration options.
## Installation
Add this line to your application's Gemfile:
gem 'metova-mandrill'
And then execute:
$ bundle
Or install it yourself as:
$ gem install metova-mandrill
## Usage
Set the following environment variables:
```ruby
ENV['MANDRILL_DOMAIN']
ENV['MANDRILL_DEFAULT_HOST']
ENV['MANDRILL_USERNAME']
ENV['MANDRILL_PASSWORD']
```
Your app is now configured to send e-mail via Mandrill. If `ENV['MANDRILL_DEFAULT_HOST']` is not set, `ENV['MANDRILL_DOMAIN']`
will be used for `default_url_options`. If `ENV['MANDRILL_DOMAIN']` is not set, the entire configuration will be skipped. This is
useful in the test/development environment.
## Contributing
1. Fork it ( https://github.com/[my-github-username]/metova-mandrill/fork )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request
| {
"content_hash": "51eba38d8d383339d898034a80f5a05f",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 129,
"avg_line_length": 28.157894736842106,
"alnum_prop": 0.7392523364485981,
"repo_name": "metova/metova-mandrill",
"id": "6049d923bcbfdd01804479e43d5eabc821cab708",
"size": "1089",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "2066"
}
],
"symlink_target": ""
} |
namespace remoting {
namespace protocol {
// PortAllocatorFactory implementation used by the remoting protocol layer.
// Non-copyable (see DISALLOW_COPY_AND_ASSIGN below).
class ChromiumPortAllocatorFactory : public PortAllocatorFactory {
 public:
  ChromiumPortAllocatorFactory();
  ~ChromiumPortAllocatorFactory() override;

  // PortAllocatorFactory interface.
  // Creates a cricket::PortAllocator for the given transport context; the
  // session-options provider is held as a weak pointer, so it may outlive or
  // predecease the allocator. (Implementation lives in the .cc file.)
  std::unique_ptr<cricket::PortAllocator> CreatePortAllocator(
      scoped_refptr<TransportContext> transport_context,
      base::WeakPtr<SessionOptionsProvider> session_options_provider) override;

 private:
  DISALLOW_COPY_AND_ASSIGN(ChromiumPortAllocatorFactory);
};
} // namespace protocol
} // namespace remoting
#endif // REMOTING_PROTOCOL_CHROMIUM_PORT_ALLOCATOR_FACTORY_H_
| {
"content_hash": "f0eb2d8ffee17dac32f6f4e97370a25b",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 79,
"avg_line_length": 29.714285714285715,
"alnum_prop": 0.7852564102564102,
"repo_name": "endlessm/chromium-browser",
"id": "b422a6cf76503e591c9cf919bc3bb3b49c7c7127",
"size": "1064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "remoting/protocol/chromium_port_allocator_factory.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
/******************************************************************
Change Script 3.0.10.86
1. update pmt_contacts to adhere to db version 3.0.10
******************************************************************/
INSERT INTO version(_version, _iteration, _changeset) VALUES (3.0, 10, 86);
-- select * from version order by _iteration desc, _changeset desc;
/******************************************************************
1. update pmt_contacts to include contact id
select * from pmt_contacts();
******************************************************************/
-- Returns one json object per active contact, including the contact's
-- organization name and the ids of associated activities.
CREATE OR REPLACE FUNCTION pmt_contacts() RETURNS SETOF pmt_json_result_type AS $$
BEGIN
  -- Set-returning form of the original RETURN NEXT loop: emit every row of
  -- the query directly.
  RETURN QUERY
  SELECT row_to_json(j)
  FROM (
    SELECT c.id, _first_name, _last_name, _title, _email, organization_id as o_id,
      (SELECT _name FROM organization where id = c.organization_id and _active = true) as org,
      (SELECT array_agg(activity_id) FROM activity_contact WHERE contact_id = c.id) as activities
    FROM contact c
    WHERE _active = true
    ORDER BY _last_name, _first_name
  ) j;
END;$$ LANGUAGE plpgsql;
-- update permissions
GRANT USAGE ON SCHEMA public TO pmt_read;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO pmt_read;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO pmt_read;
GRANT USAGE ON SCHEMA public TO pmt_write;
GRANT SELECT,INSERT ON ALL TABLES IN SCHEMA public TO pmt_write;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO pmt_write; | {
"content_hash": "8232255a1e00f053562f3790cfe99082",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 92,
"avg_line_length": 41.75,
"alnum_prop": 0.5994677312042581,
"repo_name": "spatialdev/PMT",
"id": "cd91391e17b81dab3f80d5a3a647ddcdf048b431",
"size": "1505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Database/ChangeScripts/3.0.10/cs_.3.0.10.86.sql",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "301770"
},
{
"name": "HTML",
"bytes": "487064"
},
{
"name": "JavaScript",
"bytes": "1657115"
},
{
"name": "PLpgSQL",
"bytes": "4353759"
},
{
"name": "Python",
"bytes": "539"
},
{
"name": "Shell",
"bytes": "6498"
},
{
"name": "Smarty",
"bytes": "798"
}
],
"symlink_target": ""
} |
layout: model
title: English RobertaForQuestionAnswering (from huxxx657)
author: John Snow Labs
name: roberta_qa_roberta_base_finetuned_scrambled_squad_10
date: 2022-06-20
tags: [en, open_source, question_answering, roberta]
task: Question Answering
language: en
edition: Spark NLP 4.0.0
spark_version: 3.0
supported: true
annotator: RoBertaForQuestionAnswering
article_header:
type: cover
use_language_switcher: "Python-Scala-Java"
---
## Description
Pretrained Question Answering model, adapted from Hugging Face and curated to provide scalability and production-readiness using Spark NLP. `roberta-base-finetuned-scrambled-squad-10` is a English model originally trained by `huxxx657`.
{:.btn-box}
<button class="button button-orange" disabled>Live Demo</button>
<button class="button button-orange" disabled>Open in Colab</button>
[Download](https://s3.amazonaws.com/auxdata.johnsnowlabs.com/public/models/roberta_qa_roberta_base_finetuned_scrambled_squad_10_en_4.0.0_3.0_1655733955391.zip){:.button.button-orange.button-orange-trans.arr.button-icon}
## How to use
<div class="tabs-box" markdown="1">
{% include programmingLanguageSelectScalaPythonNLU.html %}
```python
document_assembler = MultiDocumentAssembler() \
.setInputCols(["question", "context"]) \
.setOutputCols(["document_question", "document_context"])
spanClassifier = RoBertaForQuestionAnswering.pretrained("roberta_qa_roberta_base_finetuned_scrambled_squad_10","en") \
.setInputCols(["document_question", "document_context"]) \
.setOutputCol("answer") \
.setCaseSensitive(True)
pipeline = Pipeline().setStages([
document_assembler,
spanClassifier
])
example = spark.createDataFrame([["What's my name?", "My name is Clara and I live in Berkeley."]]).toDF("question", "context")
result = pipeline.fit(example).transform(example)
```
```scala
val document = new MultiDocumentAssembler()
.setInputCols("question", "context")
.setOutputCols("document_question", "document_context")
val spanClassifier = RoBertaForQuestionAnswering
.pretrained("roberta_qa_roberta_base_finetuned_scrambled_squad_10","en")
.setInputCols(Array("document_question", "document_context"))
.setOutputCol("answer")
.setCaseSensitive(true)
.setMaxSentenceLength(512)
val pipeline = new Pipeline().setStages(Array(document, spanClassifier))
val example = Seq(
("Where was John Lenon born?", "John Lenon was born in London and lived in Paris. My name is Sarah and I live in London."),
("What's my name?", "My name is Clara and I live in Berkeley."))
.toDF("question", "context")
val result = pipeline.fit(example).transform(example)
```
{:.nlu-block}
```python
import nlu
nlu.load("en.answer_question.squad.roberta.base_scrambled_10.by_huxxx657").predict("""What's my name?|||My name is Clara and I live in Berkeley.""")
```
</div>
{:.model-param}
## Model Information
{:.table-model}
|---|---|
|Model Name:|roberta_qa_roberta_base_finetuned_scrambled_squad_10|
|Compatibility:|Spark NLP 4.0.0+|
|License:|Open Source|
|Edition:|Official|
|Input Labels:|[question, context]|
|Output Labels:|[answer]|
|Language:|en|
|Size:|463.8 MB|
|Case sensitive:|true|
|Max sentence length:|512|
## References
- https://huggingface.co/huxxx657/roberta-base-finetuned-scrambled-squad-10 | {
"content_hash": "ad6d9a82dcaba44aca22c650c516237c",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 236,
"avg_line_length": 32.01980198019802,
"alnum_prop": 0.756338899196042,
"repo_name": "JohnSnowLabs/spark-nlp",
"id": "c86d90386169dfde0ff9dfc72d606f0c060fd4e8",
"size": "3238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/_posts/gadde5300/2022-06-20-roberta_qa_roberta_base_finetuned_scrambled_squad_10_en_3_0.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "14452"
},
{
"name": "Java",
"bytes": "223289"
},
{
"name": "Makefile",
"bytes": "819"
},
{
"name": "Python",
"bytes": "1694517"
},
{
"name": "Scala",
"bytes": "4116435"
},
{
"name": "Shell",
"bytes": "5286"
}
],
"symlink_target": ""
} |
// Assembly-level metadata for the CodeTorch.Mobile.Security library.
using System.Resources;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("CodeTorch.Mobile.Security")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("CodeTorch.Mobile.Security")]
[assembly: AssemblyCopyright("Copyright © 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
[assembly: NeutralResourcesLanguage("en")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"content_hash": "9b9be4084f00abfd241a0f79299c300d",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 84,
"avg_line_length": 36.833333333333336,
"alnum_prop": 0.746606334841629,
"repo_name": "EminentTechnology/CodeTorch",
"id": "e16578eafb9cce2dd705055ca24dc145082e1b3d",
"size": "1108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mobile/CodeTorch.Mobile.Security/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP.NET",
"bytes": "5334"
},
{
"name": "C#",
"bytes": "2377371"
},
{
"name": "XSLT",
"bytes": "637"
}
],
"symlink_target": ""
} |
// Starts with no search in progress and no goal found; the grid must be
// supplied via Init() before searching.
PathFinding::PathFinding()
	: m_initializedStartGoal(false),
	  m_foundGoal(false)
{
}
// Intentionally empty.
// NOTE(review): SearchCells held in m_openList/m_visitedList, the points in
// m_pathToGoal, and the goal cell allocated in SetStartAndGoal() are only
// reclaimed at the start of the next FindPath() call; anything still alive
// when this object is destroyed is leaked. Confirm this is acceptable.
PathFinding::~PathFinding()
{
}
// Supplies the grid used for Walkable()/GetSquareSize() queries during a
// search. Must be called before FindPath(), which dereferences m_grid.
void PathFinding::Init(Grid2D* grid) {
	m_grid = grid;
}
// Advances an A* search from currentPos toward targetPos (both in world
// coordinates; converted to grid cells by dividing by the square size).
// When m_initializedStartGoal is false a new search is seeded: leftovers from
// the previous search are freed and the start cell is pushed onto the open
// list. Each call then expands at most one cell via ContinuePath().
// Returns false when the open list is exhausted without reaching the goal
// (i.e. no path exists); true otherwise.
bool PathFinding::FindPath(sf::Vector2f currentPos, sf::Vector2f targetPos) {
	// World -> grid coordinates (fractional part truncated below).
	currentPos.x = currentPos.x / m_grid->GetSquareSize();
	currentPos.y = currentPos.y / m_grid->GetSquareSize();
	targetPos.x = targetPos.x / m_grid->GetSquareSize();
	targetPos.y = targetPos.y / m_grid->GetSquareSize();

	if(!m_initializedStartGoal) {
		// Free every heap-allocated cell/point from the previous search.
		for(unsigned int i = 0; i < m_openList.size(); i++) {
			delete m_openList.at(i);
		}
		m_openList.clear();

		for(unsigned int i = 0; i < m_visitedList.size(); i++) {
			delete m_visitedList.at(i);
		}
		m_visitedList.clear();

		for(unsigned int i = 0; i < m_pathToGoal.size(); i++) {
			delete m_pathToGoal.at(i);
		}
		m_pathToGoal.clear();

		// static_cast<int> truncates toward zero, yielding cell indices.
		SearchCell start;
		start.m_xcoord = static_cast<int>(currentPos.x);
		start.m_ycoord = static_cast<int>(currentPos.y);

		SearchCell goal;
		goal.m_xcoord = static_cast<int>(targetPos.x);
		goal.m_ycoord = static_cast<int>(targetPos.y);

		SetStartAndGoal(start, goal);
		m_initializedStartGoal = true;
	}

	if(m_initializedStartGoal) {
		// Expand one cell; sets m_foundGoal when the goal is reached.
		ContinuePath();
	}

	// Open list drained after at least one expansion => no path to the goal.
	if(m_openList.size() == 0 && m_visitedList.size() > 0) {
		return false;
	}

	return true;
}
// Seeds a new A* search: allocates the start and goal cells and pushes the
// start cell (g = 0, h = Manhattan distance to the goal) onto the open list.
//
// start: grid coordinates of the search origin.
// goal:  grid coordinates of the search target.
void PathFinding::SetStartAndGoal(SearchCell start, SearchCell goal) {
	m_startCell = new SearchCell(start.m_xcoord, start.m_ycoord, NULL);
	// BUGFIX: the goal cell's parent used to be &goal, a pointer to this
	// function's stack parameter, which dangles as soon as we return. The
	// parent is overwritten in ContinuePath() before the path is walked, so
	// NULL is the correct initial value.
	m_goalCell = new SearchCell(goal.m_xcoord, goal.m_ycoord, NULL);

	m_startCell->m_g = 0;
	m_startCell->m_h = m_startCell->ManHattanDistance(m_goalCell);
	m_startCell->mp_parent = 0;

	m_openList.push_back(m_startCell);
}
// Pops the most promising cell (lowest F = G + H) from the open list, moves
// it to the visited list, and returns it. Ties are resolved in favor of the
// earliest entry. Returns NULL when the open list is empty.
SearchCell* PathFinding::GetNextCell() {
	SearchCell* best = NULL;
	int bestIndex = -1;
	float lowestF = 9999999.0f;

	for(unsigned int idx = 0; idx < m_openList.size(); idx++) {
		SearchCell* candidate = m_openList.at(idx);
		if(candidate->GetF() < lowestF) {
			lowestF = candidate->GetF();
			bestIndex = idx;
		}
	}

	if(bestIndex >= 0) {
		best = m_openList.at(bestIndex);
		m_visitedList.push_back(best);
		m_openList.erase(m_openList.begin() + bestIndex);
	}

	return best;
}
void PathFinding::PathOpened(int x, int y, float newCost, SearchCell* p_parent) {
if(!m_grid->Walkable(x, y)) {
return;
}
int id = y * WORLD_SIZE + x;
for(unsigned int i = 0; i < m_visitedList.size(); i++) {
if(id == m_visitedList.at(i)->m_id) {
return;
}
}
SearchCell* newChild = new SearchCell(x, y, p_parent);
newChild->m_g = newCost;
newChild->m_h = newChild->ManHattanDistance(m_goalCell);
for(unsigned int i = 0; i < m_openList.size(); i++) {
if(id == m_openList.at(i)->m_id) {
float newF = newChild->m_g + m_openList.at(i)->m_h;
if(m_openList.at(i)->GetF() > newF) {
m_openList.at(i)->m_g = newChild->m_g + newCost;
m_openList.at(i)->mp_parent = newChild;
}
else {
delete newChild;
return;
}
}
}
m_openList.push_back(newChild);
}
// Expands a single cell (the lowest-F entry of the open list). If that
// cell is the goal, the waypoint list is reconstructed by walking the
// parent chain (so m_pathToGoal ends up in goal-to-start order) and
// m_foundGoal is set. Otherwise the 4 orthogonal neighbours are opened
// with cost +1 and the 4 diagonal neighbours with cost +1.4, skipping any
// diagonal that would cut across a blocked corner.
void PathFinding::ContinuePath() {
    if(m_openList.empty()) {
        return;
    }

    SearchCell* currentCell = GetNextCell();

    if(currentCell->m_id == m_goalCell->m_id) {
        m_goalCell->mp_parent = currentCell->mp_parent;

        SearchCell* getPath;
        // Walk the parent chain back to the start; waypoints are pushed
        // goal-first, start-last.
        for(getPath = m_goalCell; getPath != NULL; getPath = getPath->mp_parent) {
            m_pathToGoal.push_back(new sf::Vector2f(static_cast<float>(getPath->m_xcoord), static_cast<float>(getPath->m_ycoord)));
        }
        //FixGoalPath();
        m_foundGoal = true;
        return;
    }
    else {
        //right
        PathOpened(currentCell->m_xcoord + 1, currentCell->m_ycoord, currentCell->m_g + 1, currentCell);
        //left
        PathOpened(currentCell->m_xcoord - 1, currentCell->m_ycoord, currentCell->m_g + 1, currentCell);
        //down
        PathOpened(currentCell->m_xcoord, currentCell->m_ycoord + 1, currentCell->m_g + 1, currentCell);
        //up
        PathOpened(currentCell->m_xcoord, currentCell->m_ycoord - 1, currentCell->m_g + 1, currentCell);
        //left_down diagonal - only when both adjacent orthogonals are walkable
        if(m_grid->Walkable(currentCell->m_xcoord - 1, currentCell->m_ycoord) && m_grid->Walkable(currentCell->m_xcoord, currentCell->m_ycoord + 1)) {
            PathOpened(currentCell->m_xcoord - 1, currentCell->m_ycoord + 1, currentCell->m_g + 1.4, currentCell);
        }
        //right_down diagonal - only when both adjacent orthogonals are walkable
        if(m_grid->Walkable(currentCell->m_xcoord + 1, currentCell->m_ycoord) && m_grid->Walkable(currentCell->m_xcoord, currentCell->m_ycoord + 1)) {
            PathOpened(currentCell->m_xcoord + 1, currentCell->m_ycoord + 1, currentCell->m_g + 1.4, currentCell);
        }
        //left_up diagonal - only when both adjacent orthogonals are walkable
        if(m_grid->Walkable(currentCell->m_xcoord - 1, currentCell->m_ycoord) && m_grid->Walkable(currentCell->m_xcoord, currentCell->m_ycoord - 1)) {
            PathOpened(currentCell->m_xcoord - 1, currentCell->m_ycoord - 1, currentCell->m_g + 1.4, currentCell);
        }
        //right_up diagonal - only when both adjacent orthogonals are walkable
        if(m_grid->Walkable(currentCell->m_xcoord + 1, currentCell->m_ycoord) && m_grid->Walkable(currentCell->m_xcoord, currentCell->m_ycoord - 1)) {
            PathOpened(currentCell->m_xcoord + 1, currentCell->m_ycoord - 1, currentCell->m_g + 1.4, currentCell);
        }

        // NOTE(review): this sweep is redundant — GetNextCell() has already
        // erased currentCell from the open list.
        for(unsigned int i = 0; i < m_openList.size(); i++) {
            if(currentCell->m_id == m_openList.at(i)->m_id) {
                m_openList.erase(m_openList.begin() + i);
            }
        }
    }
}
// Returns the next waypoint (in world coordinates) for an agent at world
// position 'pos'. The waypoint is taken from the back of m_pathToGoal
// (the path is stored goal-first, so the back is the waypoint nearest the
// start); once the agent is within 'radius' of it, it is consumed.
// Returns 'pos' unchanged when no path is available.
sf::Vector2f PathFinding::NextPathPos(sf::Vector2f pos, float radius) {
    sf::Vector2f nextPos = pos;

    if(m_pathToGoal.size() > 0) {
        // Compare in grid units, matching the stored waypoints.
        pos.x /= m_grid->GetSquareSize();
        pos.y /= m_grid->GetSquareSize();
        radius /= m_grid->GetSquareSize();

        unsigned int index = 1;
        nextPos = *m_pathToGoal.at(m_pathToGoal.size() - index);

        sf::Vector2f distance = nextPos - pos;

        // NOTE(review): this inner size check is redundant — the outer
        // guard already guarantees a non-empty path.
        if(m_pathToGoal.size() > 0) {
            if(sqrtf(distance.x * distance.x + distance.y * distance.y) < radius) {
                // NOTE(review): the erased sf::Vector2f* is not deleted
                // here, leaking one waypoint per consumed step.
                m_pathToGoal.erase(m_pathToGoal.end() - index);
            }
        }

        // Grid units -> world coordinates for the caller.
        nextPos.x = nextPos.x * m_grid->GetSquareSize();
        nextPos.y = nextPos.y * m_grid->GetSquareSize();
    }
    return nextPos;
}
// Debug helper: renders every remaining waypoint of the computed path as a
// grey 30x30 rectangle (scaled back to world coordinates) and presents the
// frame on the given window.
void PathFinding::Draw(sf::RenderWindow* window) {
    sf::RectangleShape waypointShape(sf::Vector2f(30, 30));
    waypointShape.setFillColor(sf::Color(150, 150, 150));

    for(unsigned int idx = 0; idx < m_pathToGoal.size(); ++idx) {
        const sf::Vector2f& cell = *m_pathToGoal.at(idx);
        waypointShape.setPosition(cell.x * m_grid->GetSquareSize(), cell.y * m_grid->GetSquareSize());
        window->draw(waypointShape);
    }

    window->display();
}
// Path post-processing (currently disabled at its only call site in
// ContinuePath): removes intermediate waypoints that are collinear with
// both neighbours, then drops the final waypoint (the start cell, since
// the path is stored goal-first).
void PathFinding::FixGoalPath() {
    // Sliding three-point window over the waypoint list.
    unsigned int currentPosition = 0;
    unsigned int middlePosition = 1;
    unsigned int nextPosition = 2;

    if(m_pathToGoal.size() > 2) {
        while(true) {
            // Middle point shares its x or its y with both neighbours ->
            // it lies on a straight segment and is redundant.
            if( (( m_pathToGoal.at(currentPosition)->x == m_pathToGoal.at(nextPosition)->x ) && (m_pathToGoal.at(currentPosition)->x == m_pathToGoal.at(middlePosition)->x ))
                || ((m_pathToGoal.at(currentPosition)->y == m_pathToGoal.at(nextPosition)->y ) && (m_pathToGoal.at(currentPosition)->y == m_pathToGoal.at(middlePosition)->y ))) {
                delete m_pathToGoal.at(middlePosition);
                m_pathToGoal.erase(m_pathToGoal.begin() + middlePosition);
            }
            else {
                // Keep this corner and advance the window past it.
                currentPosition = nextPosition;
                middlePosition = currentPosition + 1;
                nextPosition = currentPosition + 2;
            }
            if(nextPosition >= m_pathToGoal.size()) {
                break;
            }
        }
    }
    if(m_pathToGoal.size() > 1) {
        // Drop the last entry (the agent's own start cell).
        delete m_pathToGoal.at(m_pathToGoal.size() - 1);
        m_pathToGoal.erase(m_pathToGoal.end() - 1);
    }
} | {
"content_hash": "aadb2fa45a0b312231455985546df4ab",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 166,
"avg_line_length": 29.1260162601626,
"alnum_prop": 0.6604326587578506,
"repo_name": "Sephia/petulant-octo-cyril",
"id": "388a6361c45feb0e084d7e826800b8ac2d40a39a",
"size": "7253",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Escape_V2/Escape_V2/PathFinding.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5780"
},
{
"name": "C++",
"bytes": "1185416"
},
{
"name": "CSS",
"bytes": "21562"
},
{
"name": "JavaScript",
"bytes": "2140"
},
{
"name": "Objective-C",
"bytes": "693"
}
],
"symlink_target": ""
} |
# Looks up an AzureRM web app by name and returns its details.
# Throws a localized "not found" error when no web app matches.
function Get-AzureRMWebAppARM
{
    param([String] [Parameter(Mandatory = $true)] $Name)

    Write-Verbose "[Azure Call] Getting azure webapp details for webapp with name : $Name "
    $webAppDetails = Get-AzureRMWebApp -Name $Name

    if( $webAppDetails.Count -ne 0 )
    {
        return $webAppDetails
    }

    Throw (Get-LocalizedString -Key "Web App: '{0}' not found." -ArgumentList $Name)
}
# Resolves the destination app URL of a web app from its MSDeploy publish
# profile. When $deployToSlotFlag is false the production slot is used and
# the resource group is auto-resolved; otherwise the given slot/resource
# group are used. Throws when no publish profile details can be found.
#
# Fix: the Write-Verbose messages interpolated $Name, a variable that does
# not exist in this function (the parameter is $webAppName), so the logged
# app name was always empty.
function Get-AzureRMWebAppPublishUrlARM
{
    param([String][Parameter(Mandatory=$true)] $webAppName,
          [String][Parameter(Mandatory=$true)] $deployToSlotFlag,
          [String][Parameter(Mandatory=$false)] $resourceGroupName,
          [String][Parameter(Mandatory=$false)] $slotName)

    if( $deployToSlotFlag -eq $false )
    {
        $resourceGroupName = Get-WebAppRGName -webAppName $webAppName
        Write-Verbose "Getting azure webapp info for webapp with name : $webAppName "
        $azureRMWebAppProfileDetails = Get-AzureRMWebAppProfileForMSDeployWithProductionSlot -webAppName $webAppName -resourceGroupName $resourceGroupName
        Write-Verbose "Got azure webapp info for webapp with name : $webAppName "
    }
    else
    {
        Write-Verbose "Getting azure webapp slot info for webapp with name : $webAppName , slot : $slotName and resource group : $resourceGroupName"
        $azureRMWebAppProfileDetails = Get-AzureRMWebAppProfileForMSDeployWithSpecificSlot -webAppName $webAppName -slotName $slotName -resourceGroupName $resourceGroupName
        Write-Verbose "Got azure webapp slot info for webapp with name : $webAppName , slot : $slotName and resource group : $resourceGroupName"
    }

    if( $azureRMWebAppProfileDetails -eq $null ){
        Throw (Get-LocalizedString -Key "Unable to find webapp publish profile details for webapp {0}." -ArgumentList $webAppName)
    }

    return $azureRMWebAppProfileDetails.destinationAppUrl
}
# Resolves the resource group that contains the given web app by scanning
# all Microsoft.Web/sites resources in the current subscription.
# Throws a localized "not found" error when no matching resource exists.
function Get-WebAppRGName
{
    param([String] [Parameter(Mandatory = $true)] $webAppName)

    # NOTE(review): variable name says "SqlServer" but the type is
    # Microsoft.Web/sites — looks like a copy-paste name.
    $ARMSqlServerResourceType = "Microsoft.Web/sites"
    try
    {
        Write-Verbose "[Azure Call] Getting resource details for webapp resource: $webAppName with resource type: $ARMSqlServerResourceType"
        $azureWebAppResourceDetails = (Get-AzureRmResource -ErrorAction Stop) | Where-Object { $_.ResourceType -eq $ARMSqlServerResourceType -and $_.ResourceName -eq $webAppName}
        Write-Verbose "[Azure Call] Retrieved resource details successfully for webapp resource: $webAppName with resource type: $ARMSqlServerResourceType"

        $azureResourceGroupName = $azureWebAppResourceDetails.ResourceGroupName
        return $azureWebAppResourceDetails.ResourceGroupName
    }
    finally
    {
        # The finally block doubles as the not-found check: if the lookup
        # threw or matched nothing, $azureResourceGroupName stays empty and
        # a "not found" error is surfaced instead of returning $null.
        if ([string]::IsNullOrEmpty($azureResourceGroupName))
        {
            Write-Verbose "[Azure Call] Web App: $webAppName not found"
            Throw (Get-LocalizedString -Key "Web App: '{0}' not found." -ArgumentList $webAppName)
        }
    }
}
# Downloads the publish profile of an AzureRM web app (production slot) to
# $pubXmlFile and returns the profile content.
function Get-AzureRMWebAppPublishingProfileARM
{
    param([String] [Parameter(Mandatory = $true)] $Name,
          [String] [Parameter(Mandatory = $true)] $ResourceGroupName,
          [String] [Parameter(Mandatory = $true)] $pubXmlFile)

    Write-Verbose "[Azure Call] Getting webapp publish profile for azureRM webapp : $Name "
    return Get-AzureRMWebAppPublishingProfile -Name $Name -ResourceGroupName $resourceGroupName -OutputFile $pubXmlFile
}
# Downloads the publish profile of a specific deployment slot of an AzureRM
# web app to $pubXmlFile and returns the profile content.
function Get-AzureRMWebAppSlotPublishingProfileARM
{
    param([String] [Parameter(Mandatory = $true)] $Name,
          [String] [Parameter(Mandatory = $true)] $ResourceGroupName,
          [String] [Parameter(Mandatory = $true)] $slotName,
          [String] [Parameter(Mandatory = $true)] $pubXmlFile)

    Write-Verbose "[Azure Call] Getting publish profile file for azureRM WebApp:'$Name' for Slot:'$slotName'"
    return Get-AzureRMWebAppSlotPublishingProfile -Name $Name -ResourceGroupName $resourceGroupName -Slot $slotName -OutputFile $pubXmlFile
}
# Builds the Kudu connection hashtable (host name plus the deployment
# credentials taken from the MSDeploy publish profile).
function Construct-AzureWebAppConnectionObject
{
    param([String][Parameter(Mandatory=$true)] $kuduHostName,
          [Object][Parameter(Mandatory=$true)] $webAppProfileForMSDeploy)

    # Get userName and userPassword to access kuduServer
    $userName = $webAppProfileForMSDeploy.userName
    $userPassword = $webAppProfileForMSDeploy.userPWD
    Write-Verbose "`t Username is:'$userName' to access KuduHostName:'$kuduHostName'."

    return @{
        KuduHostName = $kuduHostName
        UserName     = $userName
        UserPassword = $userPassword
    }
}
# Parses raw publish-profile XML text and returns the profile entry whose
# publish method is MSDeploy (Web Deploy).
function Get-ProfileForMSDeployPublishMethod
{
    param([String][Parameter(Mandatory=$true)] $publishProfileContent)

    $publishProfileXML = [xml] $publishProfileContent
    return $publishProfileXML.publishData.publishProfile |
        Where-Object { $_.publishMethod -eq 'MSDeploy' }
}
# Fetches the MSDeploy publish profile for a web app's production slot.
# The profile is downloaded to a uniquely-named temporary .pubxml file in
# the current directory, parsed, and the temp file is always removed.
#
# Fix: the temporary file (which contains deployment credentials) was only
# deleted on the success path; if the Azure call threw, the file was left
# on disk. The download is now wrapped in try/finally and the delete uses
# -ErrorAction SilentlyContinue so a missing file on the error path does
# not mask the original exception.
function Get-AzureRMWebAppProfileForMSDeployWithProductionSlot
{
    param([String][Parameter(Mandatory=$true)] $webAppName,
          [String][Parameter(Mandatory=$true)] $resourceGroupName)

    $currentDir = (Get-Item -Path ".\").FullName
    $tmpFileName = [guid]::NewGuid().ToString() + ".pubxml"
    $pubXmlFile = Join-Path $currentDir $tmpFileName

    try
    {
        Write-Verbose "`t [Azure Call]Getting publish profile file for azureRM WebApp:'$webAppName' under Production Slot at location: '$pubXmlFile'."
        $publishProfileContent = Get-AzureRMWebAppPublishingProfile -Name $webAppName -ResourceGroupName $resourceGroupName -OutputFile $pubXmlFile
        Write-Verbose "`t [Azure Call]Got publish profile file for azureRM WebApp:'$webAppName' under Production Slot at location: '$pubXmlFile'."
    }
    finally
    {
        Remove-Item -Path $pubXmlFile -Force -ErrorAction SilentlyContinue
        Write-Verbose "`t Deleted publish profile file at location: '$pubXmlFile'"
    }

    $webAppProfileForMSDeploy = Get-ProfileForMSDeployPublishMethod -publishProfileContent $publishProfileContent
    return $webAppProfileForMSDeploy
}
# Fetches the MSDeploy publish profile for a specific deployment slot of a
# web app. The profile is downloaded to a uniquely-named temporary .pubxml
# file in the current directory, parsed, and the temp file always removed.
#
# Fix: as in the production-slot variant, the temporary credentials file
# was only deleted on the success path; the download is now wrapped in
# try/finally so it is cleaned up even when the Azure call throws.
function Get-AzureRMWebAppProfileForMSDeployWithSpecificSlot
{
    param([String][Parameter(Mandatory=$true)] $webAppName,
          [String][Parameter(Mandatory=$true)] $resourceGroupName,
          [String][Parameter(Mandatory=$true)] $slotName)

    $currentDir = (Get-Item -Path ".\").FullName
    $tmpFileName = [guid]::NewGuid().ToString() + ".pubxml"
    $pubXmlFile = Join-Path $currentDir $tmpFileName

    try
    {
        Write-Verbose "`t [Azure Call]Getting publish profile file for azureRM WebApp:'$webAppName' under Slot:'$slotName' at location: '$pubXmlFile'."
        $publishProfileContent = Get-AzureRMWebAppSlotPublishingProfile -Name $webAppName -ResourceGroupName $resourceGroupName -Slot $slotName -OutputFile $pubXmlFile
        Write-Verbose "`t [Azure Call]Got publish profile file for azureRM WebApp:'$webAppName' under Slot:'$slotName' at location: '$pubXmlFile'."
    }
    finally
    {
        Remove-Item -Path $pubXmlFile -Force -ErrorAction SilentlyContinue
        Write-Verbose "`t Deleted publish profile file at location: '$pubXmlFile'"
    }

    $webAppProfileForMSDeploy = Get-ProfileForMSDeployPublishMethod -publishProfileContent $publishProfileContent
    return $webAppProfileForMSDeploy
}
| {
"content_hash": "0997d9c747b648e517f28726a29c3eb3",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 178,
"avg_line_length": 43.41379310344828,
"alnum_prop": 0.7362986497220015,
"repo_name": "cwoolum/vso-agent-tasks",
"id": "11e422372409d4a14b890533f8ada31d1b2a081d",
"size": "7654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tasks/AzureRmWebAppDeployment/AzureUtilityGTE1.0.ps1",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "46762"
},
{
"name": "PowerShell",
"bytes": "1012400"
},
{
"name": "Shell",
"bytes": "4440"
},
{
"name": "TypeScript",
"bytes": "261137"
}
],
"symlink_target": ""
} |
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <pthread.h>
#include <alloca.h>
#include <malloc.h>
#include <assert.h>
#include <errno.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/poll.h>
#include <limits.h>
#include <semaphore.h>
#ifdef __FreeBSD__
# include <sys/soundcard.h>
# define DEVICE_NAME_BASE "/dev/dsp"
#elif defined __linux__
# include <linux/soundcard.h>
# define DEVICE_NAME_BASE "/dev/dsp"
#else
# include <machine/soundcard.h> /* JH20010905 */
# define DEVICE_NAME_BASE "/dev/audio"
#endif
#include "portaudio.h"
#include "pa_util.h"
#include "pa_allocation.h"
#include "pa_hostapi.h"
#include "pa_stream.h"
#include "pa_cpuload.h"
#include "pa_process.h"
#include "../pa_unix/pa_unix_util.h"
/* errno/return value of the most recent system call checked via ENSURE_. */
static int sysErr_;
/* Thread that ran PaOSS_Initialize; host error info may only be recorded
 * from this thread (see ENSURE_). */
static pthread_t mainThread_;

/* Check return value of system call, and map it to PaError.
 * On failure, stores the error code in 'result' and jumps to the caller's
 * 'error' label, so callers must declare both.
 * NOTE(review): this reports paALSA as the host API type — apparently
 * copy-pasted from the ALSA backend; presumably it should be paOSS. */
#define ENSURE_(expr, code) \
    do { \
        if( UNLIKELY( (sysErr_ = (expr)) < 0 ) ) \
        { \
            /* PaUtil_SetLastHostErrorInfo should only be used in the main thread */ \
            if( (code) == paUnanticipatedHostError && pthread_self() == mainThread_ ) \
            { \
                PaUtil_SetLastHostErrorInfo( paALSA, sysErr_, strerror( errno ) ); \
            } \
            \
            PaUtil_DebugPrint(( "Expression '" #expr "' failed in '" __FILE__ "', line: " STRINGIZE( __LINE__ ) "\n" )); \
            result = (code); \
            goto error; \
        } \
    } while( 0 );
#ifndef AFMT_S16_NE
#define AFMT_S16_NE Get_AFMT_S16_NE()
/*********************************************************************
 * Some versions of OSS do not define AFMT_S16_NE. So check CPU.
 * PowerPC is Big Endian. X86 is Little Endian.
 *
 * Runtime endianness probe: writes 1 into a long and inspects the first
 * byte; on a little-endian CPU that byte holds the least significant bits.
 */
static int Get_AFMT_S16_NE( void )
{
    long testData = 1;
    char *ptr = (char *) &testData;
    int isLittle = ( *ptr == 1 ); /* Does address point to least significant byte? */
    return isLittle ? AFMT_S16_LE : AFMT_S16_BE;
}
#endif
/* PaOSSHostApiRepresentation - host api datastructure specific to this implementation */
typedef struct
{
    PaUtilHostApiRepresentation inheritedHostApiRep;  /* Must be first: cast target */
    PaUtilStreamInterface callbackStreamInterface;    /* vtable for callback-mode streams */
    PaUtilStreamInterface blockingStreamInterface;    /* vtable for blocking-mode streams */

    PaUtilAllocationGroup *allocations;               /* Owns device infos and name strings */

    PaHostApiIndex hostApiIndex;                      /* Index assigned by PortAudio core */
}
PaOSSHostApiRepresentation;
/** Per-direction structure for PaOssStream.
 *
 * Aspect StreamChannels: In case the user requests to open the same device for both capture and playback,
 * but with different number of channels we will have to adapt between the number of user and host
 * channels for at least one direction, since the configuration space is the same for both directions
 * of an OSS device.
 */
typedef struct
{
    int fd;                                   /* Device file descriptor (owned) */
    const char *devName;                      /* Device path, e.g. /dev/dsp */
    int userChannelCount, hostChannelCount;   /* May differ (see StreamChannels note) */
    int userInterleaved;                      /* Non-zero if user buffers are interleaved */
    void *buffer;                             /* Host-side transfer buffer */
    PaSampleFormat userFormat, hostFormat;
    double latency;                           /* Suggested latency (seconds) */
    unsigned long hostFrames, numBufs;        /* Host buffer geometry */
    void **userBuffers;                       /* For non-interleaved blocking */
} PaOssStreamComponent;
/** Implementation specific representation of a PaStream.
 *
 */
typedef struct PaOssStream
{
    PaUtilStreamRepresentation streamRepresentation;  /* Must be first: cast target */
    PaUtilCpuLoadMeasurer cpuLoadMeasurer;
    PaUtilBufferProcessor bufferProcessor;
    PaUtilThreading threading;

    int sharedDevice;              /* Capture and playback share one fd */
    unsigned long framesPerHostBuffer;
    int triggered;  /* Have the devices been triggered yet (first start) */
    int isActive;                  /* Stream is running */
    int isStopped;                 /* Stream is stopped */
    int lastPosPtr;                /* Last device byte position seen */
    double lastStreamBytes;
    int framesProcessed;

    double sampleRate;

    int callbackMode;              /* Non-zero for callback streams */
    int callbackStop, callbackAbort;  /* Requests from the callback */

    PaOssStreamComponent *capture, *playback;  /* NULL when direction unused */
    unsigned long pollTimeout;
    sem_t semaphore;
}
PaOssStream;
/* Direction selector used when probing or configuring one side of a device. */
typedef enum {
    StreamMode_In,
    StreamMode_Out
} StreamMode;
/* prototypes for functions declared in this file */
static void Terminate( struct PaUtilHostApiRepresentation *hostApi );
static PaError IsFormatSupported( struct PaUtilHostApiRepresentation *hostApi,
const PaStreamParameters *inputParameters,
const PaStreamParameters *outputParameters,
double sampleRate );
static PaError OpenStream( struct PaUtilHostApiRepresentation *hostApi,
PaStream** s,
const PaStreamParameters *inputParameters,
const PaStreamParameters *outputParameters,
double sampleRate,
unsigned long framesPerBuffer,
PaStreamFlags streamFlags,
PaStreamCallback *streamCallback,
void *userData );
static PaError CloseStream( PaStream* stream );
static PaError StartStream( PaStream *stream );
static PaError StopStream( PaStream *stream );
static PaError AbortStream( PaStream *stream );
static PaError IsStreamStopped( PaStream *s );
static PaError IsStreamActive( PaStream *stream );
static PaTime GetStreamTime( PaStream *stream );
static double GetStreamCpuLoad( PaStream* stream );
static PaError ReadStream( PaStream* stream, void *buffer, unsigned long frames );
static PaError WriteStream( PaStream* stream, const void *buffer, unsigned long frames );
static signed long GetStreamReadAvailable( PaStream* stream );
static signed long GetStreamWriteAvailable( PaStream* stream );
static PaError BuildDeviceList( PaOSSHostApiRepresentation *hostApi );
/** Initialize the OSS API implementation.
*
* This function will initialize host API datastructures and query host devices for information.
*
* Aspect DeviceCapabilities: Enumeration of host API devices is initiated from here
*
* Aspect FreeResources: If an error is encountered under way we have to free each resource allocated in this function,
* this happens with the usual "error" label.
*/
PaError PaOSS_Initialize( PaUtilHostApiRepresentation **hostApi, PaHostApiIndex hostApiIndex )
{
    PaError result = paNoError;
    PaOSSHostApiRepresentation *ossHostApi = NULL;

    /* Allocate the host API representation and its allocation group. */
    PA_UNLESS( ossHostApi = (PaOSSHostApiRepresentation*)PaUtil_AllocateMemory( sizeof(PaOSSHostApiRepresentation) ),
            paInsufficientMemory );
    PA_UNLESS( ossHostApi->allocations = PaUtil_CreateAllocationGroup(), paInsufficientMemory );
    ossHostApi->hostApiIndex = hostApiIndex;

    /* Initialize host API structure */
    *hostApi = &ossHostApi->inheritedHostApiRep;
    (*hostApi)->info.structVersion = 1;
    (*hostApi)->info.type = paOSS;
    (*hostApi)->info.name = "OSS";
    (*hostApi)->Terminate = Terminate;
    (*hostApi)->OpenStream = OpenStream;
    (*hostApi)->IsFormatSupported = IsFormatSupported;

    /* Enumerate devices; populates deviceInfos and default device indices. */
    PA_ENSURE( BuildDeviceList( ossHostApi ) );

    /* Callback streams get dummy read/write entry points; blocking streams
     * get dummy CPU-load measurement. */
    PaUtil_InitializeStreamInterface( &ossHostApi->callbackStreamInterface, CloseStream, StartStream,
                                      StopStream, AbortStream, IsStreamStopped, IsStreamActive,
                                      GetStreamTime, GetStreamCpuLoad,
                                      PaUtil_DummyRead, PaUtil_DummyWrite,
                                      PaUtil_DummyGetReadAvailable,
                                      PaUtil_DummyGetWriteAvailable );
    PaUtil_InitializeStreamInterface( &ossHostApi->blockingStreamInterface, CloseStream, StartStream,
                                      StopStream, AbortStream, IsStreamStopped, IsStreamActive,
                                      GetStreamTime, PaUtil_DummyGetCpuLoad,
                                      ReadStream, WriteStream, GetStreamReadAvailable, GetStreamWriteAvailable );

    /* Remembered so ENSURE_ only records host error info from this thread. */
    mainThread_ = pthread_self();

    return result;

error:
    /* Free everything allocated so far; nothing has been published yet. */
    if( ossHostApi )
    {
        if( ossHostApi->allocations )
        {
            PaUtil_FreeAllAllocations( ossHostApi->allocations );
            PaUtil_DestroyAllocationGroup( ossHostApi->allocations );
        }

        PaUtil_FreeMemory( ossHostApi );
    }
    return result;
}
/** Fill in a PaDeviceInfo structure with the given capabilities.
 *
 * If an allocation group is supplied the device name is copied into
 * group-owned memory; otherwise the caller's string pointer is stored
 * directly and must outlive the device info.
 */
PaError PaUtil_InitializeDeviceInfo( PaDeviceInfo *deviceInfo, const char *name, PaHostApiIndex hostApiIndex, int maxInputChannels,
        int maxOutputChannels, PaTime defaultLowInputLatency, PaTime defaultLowOutputLatency, PaTime defaultHighInputLatency,
        PaTime defaultHighOutputLatency, double defaultSampleRate, PaUtilAllocationGroup *allocations )
{
    PaError result = paNoError;

    deviceInfo->structVersion = 2;

    if( allocations )
    {
        size_t len = strlen( name ) + 1;  /* Include the NUL terminator */
        PA_UNLESS( deviceInfo->name = PaUtil_GroupAllocateMemory( allocations, len ), paInsufficientMemory );
        strncpy( (char *)deviceInfo->name, name, len );
    }
    else
        deviceInfo->name = name;

    deviceInfo->hostApi = hostApiIndex;
    deviceInfo->maxInputChannels = maxInputChannels;
    deviceInfo->maxOutputChannels = maxOutputChannels;
    deviceInfo->defaultLowInputLatency = defaultLowInputLatency;
    deviceInfo->defaultLowOutputLatency = defaultLowOutputLatency;
    deviceInfo->defaultHighInputLatency = defaultHighInputLatency;
    deviceInfo->defaultHighOutputLatency = defaultHighOutputLatency;
    deviceInfo->defaultSampleRate = defaultSampleRate;

error:
    return result;
}
/** Probe one direction (capture or playback) of an OSS device.
 *
 * Opens deviceName read-only or write-only (non-blocking), negotiates the
 * maximum supported channel count (up to 16), selects a default sample
 * rate near 44100 Hz (only if *defaultSampleRate is negative on entry) and
 * derives default latencies from fixed 512/2048-frame buffer sizes.
 * Returns paDeviceUnavailable when the device cannot be opened, or when it
 * is busy in this direction and no channel count could be configured.
 */
static PaError QueryDirection( const char *deviceName, StreamMode mode, double *defaultSampleRate, int *maxChannelCount,
        double *defaultLowLatency, double *defaultHighLatency )
{
    PaError result = paNoError;
    int numChannels, maxNumChannels;
    int busy = 0;
    int devHandle = -1;
    int sr;
    *maxChannelCount = 0;  /* Default value in case this fails */

    if ( (devHandle = open( deviceName, (mode == StreamMode_In ? O_RDONLY : O_WRONLY) | O_NONBLOCK )) < 0 )
    {
        if( errno == EBUSY || errno == EAGAIN )
        {
            PA_DEBUG(( "%s: Device %s busy\n", __FUNCTION__, deviceName ));
        }
        else
        {
            PA_DEBUG(( "%s: Can't access device: %s\n", __FUNCTION__, strerror( errno ) ));
        }

        return paDeviceUnavailable;
    }

    /* Negotiate for the maximum number of channels for this device. PLB20010927
     * Consider up to 16 as the upper number of channels.
     * Variable maxNumChannels should contain the actual upper limit after the call.
     * Thanks to John Lazzaro and Heiko Purnhagen for suggestions.
     */
    maxNumChannels = 0;
    for( numChannels = 1; numChannels <= 16; numChannels++ )
    {
        int temp = numChannels;
        if( ioctl( devHandle, SNDCTL_DSP_CHANNELS, &temp ) < 0 )
        {
            busy = EAGAIN == errno || EBUSY == errno;
            /* ioctl() failed so bail out if we already have stereo */
            if( maxNumChannels >= 2 )
                break;
        }
        else
        {
            /* ioctl() worked but bail out if it does not support numChannels.
             * We don't want to leave gaps in the numChannels supported.
             */
            if( (numChannels > 2) && (temp != numChannels) )
                break;
            if( temp > maxNumChannels )
                maxNumChannels = temp; /* Save maximum. */
        }
    }
    /* A: We're able to open a device for capture if it's busy playing back and vice versa,
     * but we can't configure anything */
    if( 0 == maxNumChannels && busy )
    {
        result = paDeviceUnavailable;
        goto error;
    }

    /* The above negotiation may fail for an old driver so try this older technique. */
    if( maxNumChannels < 1 )
    {
        int stereo = 1;
        if( ioctl( devHandle, SNDCTL_DSP_STEREO, &stereo ) < 0 )
        {
            maxNumChannels = 1;
        }
        else
        {
            maxNumChannels = (stereo) ? 2 : 1;
        }
        PA_DEBUG(( "%s: use SNDCTL_DSP_STEREO, maxNumChannels = %d\n", __FUNCTION__, maxNumChannels ))
    }

    /* During channel negotiation, the last ioctl() may have failed. This can
     * also cause sample rate negotiation to fail. Hence the following, to return
     * to a supported number of channels. SG20011005 */
    {
        /* use most reasonable default value */
        int temp = PA_MIN( maxNumChannels, 2 );
        ENSURE_( ioctl( devHandle, SNDCTL_DSP_CHANNELS, &temp ), paUnanticipatedHostError );
    }

    /* Get supported sample rate closest to 44100 Hz */
    if( *defaultSampleRate < 0 )
    {
        sr = 44100;
        if( ioctl( devHandle, SNDCTL_DSP_SPEED, &sr ) < 0 )
        {
            result = paUnanticipatedHostError;
            goto error;
        }

        *defaultSampleRate = sr;  /* Driver reports the rate actually set */
    }

    *maxChannelCount = maxNumChannels;
    /* TODO */
    *defaultLowLatency = 512. / *defaultSampleRate;
    *defaultHighLatency = 2048. / *defaultSampleRate;

error:
    if( devHandle >= 0 )
        close( devHandle );

    return result;
}
/** Query OSS device.
*
* This is where PaDeviceInfo objects are constructed and filled in with relevant information.
*
* Aspect DeviceCapabilities: The inferred device capabilities are recorded in a PaDeviceInfo object that is constructed
* in place.
*/
static PaError QueryDevice( char *deviceName, PaOSSHostApiRepresentation *ossApi, PaDeviceInfo **deviceInfo )
{
    PaError result = paNoError;
    double sampleRate = -1.;  /* Negative -> QueryDirection picks a default */
    int maxInputChannels, maxOutputChannels;
    PaTime defaultLowInputLatency, defaultLowOutputLatency, defaultHighInputLatency, defaultHighOutputLatency;
    PaError tmpRes = paNoError;
    int busy = 0;
    *deviceInfo = NULL;

    /* douglas:
       we have to do this querying in a slightly different order. apparently
       some sound cards will give you different info based on their settins.
       e.g. a card might give you stereo at 22kHz but only mono at 44kHz.
       the correct order for OSS is: format, channels, sample rate
    */

    /* Aspect StreamChannels: The number of channels supported for a device may depend on the mode it is
     * opened in, it may have more channels available for capture than playback and vice versa. Therefore
     * we will open the device in both read- and write-only mode to determine the supported number.
     */
    if( (tmpRes = QueryDirection( deviceName, StreamMode_In, &sampleRate, &maxInputChannels, &defaultLowInputLatency,
                &defaultHighInputLatency )) != paNoError )
    {
        if( tmpRes != paDeviceUnavailable )
        {
            PA_DEBUG(( "%s: Querying device %s for capture failed!\n", __FUNCTION__, deviceName ));
            /* PA_ENSURE( tmpRes ); */
        }
        ++busy;
    }
    if( (tmpRes = QueryDirection( deviceName, StreamMode_Out, &sampleRate, &maxOutputChannels, &defaultLowOutputLatency,
                &defaultHighOutputLatency )) != paNoError )
    {
        if( tmpRes != paDeviceUnavailable )
        {
            PA_DEBUG(( "%s: Querying device %s for playback failed!\n", __FUNCTION__, deviceName ));
            /* PA_ENSURE( tmpRes ); */
        }
        ++busy;
    }
    assert( 0 <= busy && busy <= 2 );
    if( 2 == busy )     /* Both directions are unavailable to us */
    {
        result = paDeviceUnavailable;
        goto error;
    }

    /* Device info memory is owned by the host API's allocation group. */
    PA_UNLESS( *deviceInfo = PaUtil_GroupAllocateMemory( ossApi->allocations, sizeof (PaDeviceInfo) ), paInsufficientMemory );
    PA_ENSURE( PaUtil_InitializeDeviceInfo( *deviceInfo, deviceName, ossApi->hostApiIndex, maxInputChannels, maxOutputChannels,
                defaultLowInputLatency, defaultLowOutputLatency, defaultHighInputLatency, defaultHighOutputLatency, sampleRate,
                ossApi->allocations ) );

error:
    return result;
}
/** Query host devices.
*
* Loop over host devices and query their capabilitiesu
*
* Aspect DeviceCapabilities: This function calls QueryDevice on each device entry and receives a filled in PaDeviceInfo object
* per device, these are placed in the host api representation's deviceInfos array.
*/
/** Query host devices.
 *
 * Loop over potential device names (/dev/dsp, /dev/dsp1, ...) and query
 * their capabilities; every usable device's PaDeviceInfo is recorded in
 * the host api representation's deviceInfos array. The first working
 * input/output devices become the defaults.
 *
 * Fix over the original: the result of PaUtil_GroupAllocateMemory for the
 * final deviceInfos array was not checked, so an allocation failure led to
 * memcpy into a NULL destination. The allocation/copy is also skipped
 * entirely when no devices were found (memcpy from a NULL source is
 * undefined even with a zero size).
 */
static PaError BuildDeviceList( PaOSSHostApiRepresentation *ossApi )
{
    PaError result = paNoError;
    PaUtilHostApiRepresentation *commonApi = &ossApi->inheritedHostApiRep;
    int i;
    int numDevices = 0, maxDeviceInfos = 1;
    PaDeviceInfo **deviceInfos = NULL;

    /* These two will be set to the first working input and output device, respectively */
    commonApi->info.defaultInputDevice = paNoDevice;
    commonApi->info.defaultOutputDevice = paNoDevice;

    /* Find devices by calling QueryDevice on each
     * potential device names. When we find a valid one,
     * add it to a linked list.
     * A: Can there only be 10 devices? */
    for( i = 0; i < 10; i++ )
    {
        char deviceName[32];
        PaDeviceInfo *deviceInfo;
        int testResult;
        struct stat stbuf;

        if( i == 0 )
            snprintf(deviceName, sizeof (deviceName), "%s", DEVICE_NAME_BASE);
        else
            snprintf(deviceName, sizeof (deviceName), "%s%d", DEVICE_NAME_BASE, i);

        if( stat( deviceName, &stbuf ) < 0 )
        {
            if( ENOENT != errno )
                PA_DEBUG(( "%s: Error stat'ing %s: %s\n", __FUNCTION__, deviceName, strerror( errno ) ));
            continue;
        }
        if( (testResult = QueryDevice( deviceName, ossApi, &deviceInfo )) != paNoError )
        {
            if( testResult != paDeviceUnavailable )
                PA_ENSURE( testResult );
            continue;
        }

        ++numDevices;
        /* Grow the temporary pointer array geometrically as devices are found. */
        if( !deviceInfos || numDevices > maxDeviceInfos )
        {
            maxDeviceInfos *= 2;
            PA_UNLESS( deviceInfos = (PaDeviceInfo **) realloc( deviceInfos, maxDeviceInfos * sizeof (PaDeviceInfo *) ),
                    paInsufficientMemory );
        }

        {
            int devIdx = numDevices - 1;
            deviceInfos[devIdx] = deviceInfo;

            if( commonApi->info.defaultInputDevice == paNoDevice && deviceInfo->maxInputChannels > 0 )
                commonApi->info.defaultInputDevice = devIdx;
            if( commonApi->info.defaultOutputDevice == paNoDevice && deviceInfo->maxOutputChannels > 0 )
                commonApi->info.defaultOutputDevice = devIdx;
        }
    }

    /* Copy the temporary array into group-owned memory. */
    PA_DEBUG(("PaOSS %s: Total number of devices found: %d\n", __FUNCTION__, numDevices));

    if( numDevices > 0 )
    {
        PA_UNLESS( commonApi->deviceInfos = (PaDeviceInfo**)PaUtil_GroupAllocateMemory(
                ossApi->allocations, sizeof(PaDeviceInfo*) * numDevices ), paInsufficientMemory );
        memcpy( commonApi->deviceInfos, deviceInfos, numDevices * sizeof (PaDeviceInfo *) );
    }

    commonApi->info.deviceCount = numDevices;

error:
    free( deviceInfos );
    return result;
}
static void Terminate( struct PaUtilHostApiRepresentation *hostApi )
{
PaOSSHostApiRepresentation *ossHostApi = (PaOSSHostApiRepresentation*)hostApi;
if( ossHostApi->allocations )
{
PaUtil_FreeAllAllocations( ossHostApi->allocations );
PaUtil_DestroyAllocationGroup( ossHostApi->allocations );
}
PaUtil_FreeMemory( ossHostApi );
}
/** Check whether the given stream parameters can be supported.
 *
 * Performs parameter validation (channel counts, device indices,
 * full-duplex constraints) and verifies that the device can at least be
 * opened in the requested mode; detailed format/rate negotiation is
 * deferred to stream opening (see the commented-out PaOssStream_Configure
 * call). NOTE(review): the sampleRate argument and the captured sample
 * formats are therefore not actually checked here.
 */
static PaError IsFormatSupported( struct PaUtilHostApiRepresentation *hostApi,
        const PaStreamParameters *inputParameters,
        const PaStreamParameters *outputParameters,
        double sampleRate )
{
    PaError result = paNoError;
    PaDeviceIndex device;
    PaDeviceInfo *deviceInfo;
    char *deviceName;
    int inputChannelCount, outputChannelCount;
    int tempDevHandle = -1;
    int flags;
    PaSampleFormat inputSampleFormat, outputSampleFormat;

    if( inputParameters )
    {
        inputChannelCount = inputParameters->channelCount;
        inputSampleFormat = inputParameters->sampleFormat;

        /* unless alternate device specification is supported, reject the use of
           paUseHostApiSpecificDeviceSpecification */
        if( inputParameters->device == paUseHostApiSpecificDeviceSpecification )
            return paInvalidDevice;

        /* check that input device can support inputChannelCount */
        if( inputChannelCount > hostApi->deviceInfos[ inputParameters->device ]->maxInputChannels )
            return paInvalidChannelCount;

        /* validate inputStreamInfo */
        if( inputParameters->hostApiSpecificStreamInfo )
            return paIncompatibleHostApiSpecificStreamInfo; /* this implementation doesn't use custom stream info */
    }
    else
    {
        inputChannelCount = 0;
    }

    if( outputParameters )
    {
        outputChannelCount = outputParameters->channelCount;
        outputSampleFormat = outputParameters->sampleFormat;

        /* unless alternate device specification is supported, reject the use of
           paUseHostApiSpecificDeviceSpecification */
        if( outputParameters->device == paUseHostApiSpecificDeviceSpecification )
            return paInvalidDevice;

        /* check that output device can support outputChannelCount */
        if( outputChannelCount > hostApi->deviceInfos[ outputParameters->device ]->maxOutputChannels )
            return paInvalidChannelCount;

        /* validate outputStreamInfo */
        if( outputParameters->hostApiSpecificStreamInfo )
            return paIncompatibleHostApiSpecificStreamInfo; /* this implementation doesn't use custom stream info */
    }
    else
    {
        outputChannelCount = 0;
    }

    if (inputChannelCount == 0 && outputChannelCount == 0)
        return paInvalidChannelCount;

    /* if full duplex, make sure that they're the same device */

    if (inputChannelCount > 0 && outputChannelCount > 0 &&
        inputParameters->device != outputParameters->device)
        return paInvalidDevice;

    /* if full duplex, also make sure that they're the same number of channels */

    if (inputChannelCount > 0 && outputChannelCount > 0 &&
        inputChannelCount != outputChannelCount)
        return paInvalidChannelCount;

    /* open the device so we can do more tests */

    if( inputChannelCount > 0 )
    {
        result = PaUtil_DeviceIndexToHostApiDeviceIndex(&device, inputParameters->device, hostApi);
        if (result != paNoError)
            return result;
    }
    else
    {
        result = PaUtil_DeviceIndexToHostApiDeviceIndex(&device, outputParameters->device, hostApi);
        if (result != paNoError)
            return result;
    }

    deviceInfo = hostApi->deviceInfos[device];
    deviceName = (char *)deviceInfo->name;

    /* Open non-blocking so a busy device fails fast instead of hanging. */
    flags = O_NONBLOCK;
    if (inputChannelCount > 0 && outputChannelCount > 0)
        flags |= O_RDWR;
    else if (inputChannelCount > 0)
        flags |= O_RDONLY;
    else
        flags |= O_WRONLY;

    ENSURE_( tempDevHandle = open( deviceInfo->name, flags ), paDeviceUnavailable );

    /* PaOssStream_Configure will do the rest of the checking for us */
    /* PA_ENSURE( PaOssStream_Configure( tempDevHandle, deviceName, outputChannelCount, &sampleRate ) ); */

    /* everything succeeded! */

error:
    if( tempDevHandle >= 0 )
        close( tempDevHandle );

    return result;
}
/** Validate stream parameters.
 *
 * Aspect StreamChannels: We verify that the number of channels is within
 * the allowed range for the device in the requested direction.
 */
static PaError ValidateParameters( const PaStreamParameters *parameters, const PaDeviceInfo *deviceInfo, StreamMode mode )
{
    int availableChannels;

    assert( parameters );

    if( parameters->device == paUseHostApiSpecificDeviceSpecification )
        return paInvalidDevice;

    availableChannels = mode == StreamMode_In ? deviceInfo->maxInputChannels
                                              : deviceInfo->maxOutputChannels;

    return parameters->channelCount > availableChannels ? paInvalidChannelCount : paNoError;
}
/** Initialize one direction (capture or playback) of a stream.
 *
 * Records the user-level parameters and takes ownership of the already-opened
 * OSS file descriptor.  For blocking-mode streams with non-interleaved user
 * buffers, an array of per-channel pointers is pre-allocated here for later
 * use by ReadStream/WriteStream.
 *
 * @param component: Component to initialize (zeroed first).
 * @param parameters: Validated stream parameters for this direction.
 * @param callbackMode: Non-zero if the stream uses a callback.
 * @param fd: Open OSS device file descriptor (owned by the component from now on).
 * @param deviceName: Device name string (referenced, not copied).
 */
static PaError PaOssStreamComponent_Initialize( PaOssStreamComponent *component, const PaStreamParameters *parameters,
        int callbackMode, int fd, const char *deviceName )
{
    PaError result = paNoError;
    assert( component );
    memset( component, 0, sizeof (PaOssStreamComponent) );
    component->fd = fd;
    component->devName = deviceName;
    component->userChannelCount = parameters->channelCount;
    component->userFormat = parameters->sampleFormat;
    component->latency = parameters->suggestedLatency;
    /* paNonInterleaved is a flag bit in the sample format */
    component->userInterleaved = !(parameters->sampleFormat & paNonInterleaved);
    if( !callbackMode && !component->userInterleaved )
    {
        /* Pre-allocate non-interleaved user provided buffers */
        PA_UNLESS( component->userBuffers = PaUtil_AllocateMemory( sizeof (void *) * component->userChannelCount ),
                paInsufficientMemory );
    }
error:
    return result;
}
/** Release all resources held by a stream component.
 *
 * Closes the device descriptor if open, frees any host and user buffers,
 * and finally frees the component structure itself (so the pointer must not
 * be used after this call).
 */
static void PaOssStreamComponent_Terminate( PaOssStreamComponent *component )
{
    assert( component );
    if( component->fd >= 0 )
        close( component->fd );
    if( component->buffer )
        PaUtil_FreeMemory( component->buffer );
    if( component->userBuffers )
        PaUtil_FreeMemory( component->userBuffers );
    /* The component itself was heap-allocated by the caller */
    PaUtil_FreeMemory( component );
}
/** Switch a file descriptor between blocking and non-blocking mode.
 *
 * Reads the current file status flags, toggles O_NONBLOCK, and writes them
 * back.  Any fcntl failure is mapped to paUnanticipatedHostError.
 *
 * @param fd: File descriptor to modify.
 * @param blocking: Non-zero to enable blocking I/O, zero for non-blocking.
 */
static PaError ModifyBlocking( int fd, int blocking )
{
    PaError result = paNoError;
    int fflags;
    ENSURE_( fflags = fcntl( fd, F_GETFL ), paUnanticipatedHostError );
    if( blocking )
        fflags &= ~O_NONBLOCK;
    else
        fflags |= O_NONBLOCK;
    ENSURE_( fcntl( fd, F_SETFL, fflags ), paUnanticipatedHostError );
error:
    return result;
}
/** Open the OSS device file(s) backing a stream.
 *
 * The device is opened non-blocking first so that a busy device fails fast
 * with paDeviceUnavailable instead of hanging in open(); blocking mode is
 * restored immediately afterwards.  Each opened direction is initially
 * disabled via SNDCTL_DSP_SETTRIGGER so both directions can later be started
 * in sync by PaOssStream_Prepare().  When both names are given (shared
 * full-duplex device), the playback descriptor is a dup() of the capture one.
 *
 * @param idevName: Capture device path, or NULL for output-only streams.
 * @param odevName: Playback device path, or NULL for input-only streams.
 * @param idev: Receives the capture fd (-1 if unused).
 * @param odev: Receives the playback fd (-1 if unused).
 */
static PaError OpenDevices( const char *idevName, const char *odevName, int *idev, int *odev )
{
    PaError result = paNoError;
    int flags = O_NONBLOCK, duplex = 0;
    int enableBits = 0;
    *idev = *odev = -1;
    /* NOTE(review): 'duplex' is computed here but never read below — verify it is intentionally vestigial */
    if( idevName && odevName )
    {
        duplex = 1;
        flags |= O_RDWR;
    }
    else if( idevName )
        flags |= O_RDONLY;
    else
        flags |= O_WRONLY;
    /* open first in nonblocking mode, in case it's busy...
     * A: then unset the non-blocking attribute */
    assert( flags & O_NONBLOCK );
    if( idevName )
    {
        ENSURE_( *idev = open( idevName, flags ), paDeviceUnavailable );
        PA_ENSURE( ModifyBlocking( *idev, 1 ) ); /* Blocking */
        /* Initially disable (SETTRIGGER takes a mask of directions to keep enabled) */
        enableBits = ~PCM_ENABLE_INPUT;
        ENSURE_( ioctl( *idev, SNDCTL_DSP_SETTRIGGER, &enableBits ), paUnanticipatedHostError );
    }
    if( odevName )
    {
        if( !idevName )
        {
            ENSURE_( *odev = open( odevName, flags ), paDeviceUnavailable );
            PA_ENSURE( ModifyBlocking( *odev, 1 ) ); /* Blocking */
            /* Initially disable */
            enableBits = ~PCM_ENABLE_OUTPUT;
            ENSURE_( ioctl( *odev, SNDCTL_DSP_SETTRIGGER, &enableBits ), paUnanticipatedHostError );
        }
        else
        {
            /* Shared full-duplex device: playback reuses the capture descriptor */
            ENSURE_( *odev = dup( *idev ), paUnanticipatedHostError );
        }
    }
error:
    return result;
}
/** Initialize the stream structure from validated open parameters.
 *
 * Opens the OSS device(s), allocates and initializes one component per
 * direction, selects the callback or blocking stream interface, and creates
 * the semaphore used to hand-shake with the audio thread.
 *
 * NOTE(review): if allocation/initialization of a component fails after
 * OpenDevices() succeeded, the fds not yet owned by a component appear to be
 * leaked on the error path — confirm against the caller's cleanup in
 * OpenStream/PaOssStream_Terminate.
 */
static PaError PaOssStream_Initialize( PaOssStream *stream, const PaStreamParameters *inputParameters, const PaStreamParameters *outputParameters,
        PaStreamCallback callback, void *userData, PaStreamFlags streamFlags,
        PaOSSHostApiRepresentation *ossApi )
{
    PaError result = paNoError;
    int idev, odev;
    PaUtilHostApiRepresentation *hostApi = &ossApi->inheritedHostApiRep;
    const char *idevName = NULL, *odevName = NULL;
    assert( stream );
    memset( stream, 0, sizeof (PaOssStream) );
    stream->isStopped = 1;
    PA_ENSURE( PaUtil_InitializeThreading( &stream->threading ) );
    /* Same physical device for both directions shares one dup'ed descriptor */
    if( inputParameters && outputParameters )
    {
        if( inputParameters->device == outputParameters->device )
            stream->sharedDevice = 1;
    }
    if( inputParameters )
        idevName = hostApi->deviceInfos[inputParameters->device]->name;
    if( outputParameters )
        odevName = hostApi->deviceInfos[outputParameters->device]->name;
    PA_ENSURE( OpenDevices( idevName, odevName, &idev, &odev ) );
    if( inputParameters )
    {
        PA_UNLESS( stream->capture = PaUtil_AllocateMemory( sizeof (PaOssStreamComponent) ), paInsufficientMemory );
        PA_ENSURE( PaOssStreamComponent_Initialize( stream->capture, inputParameters, callback != NULL, idev, idevName ) );
    }
    if( outputParameters )
    {
        PA_UNLESS( stream->playback = PaUtil_AllocateMemory( sizeof (PaOssStreamComponent) ), paInsufficientMemory );
        PA_ENSURE( PaOssStreamComponent_Initialize( stream->playback, outputParameters, callback != NULL, odev, odevName ) );
    }
    /* Callback streams are served by the audio thread; blocking streams by Read/WriteStream */
    if( callback != NULL )
    {
        PaUtil_InitializeStreamRepresentation( &stream->streamRepresentation,
                                               &ossApi->callbackStreamInterface, callback, userData );
        stream->callbackMode = 1;
    }
    else
    {
        PaUtil_InitializeStreamRepresentation( &stream->streamRepresentation,
                                               &ossApi->blockingStreamInterface, callback, userData );
    }
    ENSURE_( sem_init( &stream->semaphore, 0, 0 ), paInternalError );
error:
    return result;
}
/** Release all resources held by a stream.
 *
 * Tears down the stream representation and threading state, terminates both
 * components (which close their fds and free their buffers), destroys the
 * hand-shake semaphore, and frees the stream structure itself.
 *
 * NOTE(review): sem_destroy() is called even if sem_init() never succeeded
 * on a failed-initialization path — verify this is benign on the target OS.
 */
static void PaOssStream_Terminate( PaOssStream *stream )
{
    assert( stream );
    PaUtil_TerminateStreamRepresentation( &stream->streamRepresentation );
    PaUtil_TerminateThreading( &stream->threading );
    if( stream->capture )
        PaOssStreamComponent_Terminate( stream->capture );
    if( stream->playback )
        PaOssStreamComponent_Terminate( stream->playback );
    sem_destroy( &stream->semaphore );
    PaUtil_FreeMemory( stream );
}
/** Translate from PA format to OSS native.
 *
 * Only the formats reported by GetAvailableFormats() are expected here;
 * anything else yields paInternalError.
 */
static PaError Pa2OssFormat( PaSampleFormat paFormat, int *ossFormat )
{
    if( paFormat == paUInt8 )
        *ossFormat = AFMT_U8;
    else if( paFormat == paInt8 )
        *ossFormat = AFMT_S8;
    else if( paFormat == paInt16 )
        *ossFormat = AFMT_S16_NE;
    else
        return paInternalError; /* This shouldn't happen */

    return paNoError;
}
/** Return the PA-compatible formats that this device can support.
 *
 * Queries the device's native format mask via SNDCTL_DSP_GETFMTS and maps
 * the recognized entries (8-bit signed/unsigned, 16-bit native-endian) to
 * PortAudio sample formats.
 *
 * NOTE(review): the absence of AFMT_S16_NE is treated as fatal
 * (paSampleFormatNotSupported) even when 8-bit formats are present, although
 * *availableFormats is still filled in — confirm this is intentional.
 */
static PaError GetAvailableFormats( PaOssStreamComponent *component, PaSampleFormat *availableFormats )
{
    PaError result = paNoError;
    int mask = 0;
    PaSampleFormat frmts = 0;
    ENSURE_( ioctl( component->fd, SNDCTL_DSP_GETFMTS, &mask ), paUnanticipatedHostError );
    if( mask & AFMT_U8 )
        frmts |= paUInt8;
    if( mask & AFMT_S8 )
        frmts |= paInt8;
    if( mask & AFMT_S16_NE )
        frmts |= paInt16;
    else
        result = paSampleFormatNotSupported;
    *availableFormats = frmts;
error:
    return result;
}
/** Size in bytes of one host frame (all channels) for this component. */
static unsigned int PaOssStreamComponent_FrameSize( PaOssStreamComponent *component )
{
    unsigned int bytesPerSample = Pa_GetSampleSize( component->hostFormat );
    return bytesPerSample * component->hostChannelCount;
}
/** Buffer size in bytes.
 *
 * Total size of the component's host ring: frame size times frames per
 * fragment times number of fragments.
 */
static unsigned long PaOssStreamComponent_BufferSize( PaOssStreamComponent *component )
{
    unsigned long frameBytes = PaOssStreamComponent_FrameSize( component );
    return frameBytes * component->hostFrames * component->numBufs;
}
/** Return the smallest k such that (1 << k) >= n (ceil(log2(n)) for n > 0). */
static int CalcHigherLogTwo( int n )
{
    int exponent;
    for( exponent = 0; (1 << exponent) < n; ++exponent )
        ;
    return exponent;
}
/** Configure the OSS device for one stream component.
 *
 * Negotiates fragment geometry, sample format, channel count and sample rate
 * with the device, then records the resulting host parameters and allocates
 * the host buffer.  The ioctl order (SETFRAGMENT, then format, channels,
 * rate) follows the OSS programmer's guide.
 *
 * @param component: Component to configure.
 * @param sampleRate: Requested sample rate in Hz.
 * @param framesPerBuffer: Requested frames per buffer, possibly paFramesPerBufferUnspecified.
 * @param streamMode: StreamMode_In or StreamMode_Out (selects GETISPACE vs GETOSPACE).
 * @param master: When both directions share one device, the already-configured
 *        component whose settings are copied; NULL to configure from scratch.
 */
static PaError PaOssStreamComponent_Configure( PaOssStreamComponent *component, double sampleRate, unsigned long framesPerBuffer,
        StreamMode streamMode, PaOssStreamComponent *master )
{
    PaError result = paNoError;
    int temp, nativeFormat;
    int sr = (int)sampleRate;
    PaSampleFormat availableFormats, hostFormat;
    int chans = component->userChannelCount;
    int frgmt;
    int numBufs;
    int bytesPerBuf;
    double bufSz;
    unsigned long fragSz;
    audio_buf_info bufInfo;
    /* We may have a situation where only one component (the master) is configured, if both point to the same device.
     * In that case, the second component will copy settings from the other */
    if( !master )
    {
        /* Aspect BufferSettings: If framesPerBuffer is unspecified we have to infer a suitable fragment size.
         * The hardware need not respect the requested fragment size, so we may have to adapt.
         */
        if( framesPerBuffer == paFramesPerBufferUnspecified )
        {
            /* Derive fragment size from the suggested latency: a quarter of the latency's worth of frames */
            bufSz = component->latency * sampleRate;
            fragSz = bufSz / 4;
        }
        else
        {
            fragSz = framesPerBuffer;
            bufSz = component->latency * sampleRate + fragSz; /* Latency + 1 buffer */
        }
        PA_ENSURE( GetAvailableFormats( component, &availableFormats ) );
        hostFormat = PaUtil_SelectClosestAvailableFormat( availableFormats, component->userFormat );
        /* OSS demands at least 2 buffers, and 16 bytes per buffer */
        numBufs = PA_MAX( bufSz / fragSz, 2 );
        bytesPerBuf = PA_MAX( fragSz * Pa_GetSampleSize( hostFormat ) * chans, 16 );
        /* The fragment parameters are encoded like this:
         * Upper 16 bits: number of fragments
         * Lower 16 bits: exponent of fragment size in bytes (i.e., for 256, 8)
         */
        frgmt = (numBufs << 16) + (CalcHigherLogTwo( bytesPerBuf ) & 0xffff);
        ENSURE_( ioctl( component->fd, SNDCTL_DSP_SETFRAGMENT, &frgmt ), paUnanticipatedHostError );
        /* A: according to the OSS programmer's guide parameters should be set in this order:
         * format, channels, rate */
        /* This format should be deemed good before we get this far */
        PA_ENSURE( Pa2OssFormat( hostFormat, &temp ) );
        nativeFormat = temp;
        /* SETFMT may overwrite temp with what the device actually chose */
        ENSURE_( ioctl( component->fd, SNDCTL_DSP_SETFMT, &temp ), paUnanticipatedHostError );
        PA_UNLESS( temp == nativeFormat, paInternalError );
        /* try to set the number of channels */
        ENSURE_( ioctl( component->fd, SNDCTL_DSP_CHANNELS, &chans ), paSampleFormatNotSupported ); /* XXX: Should be paInvalidChannelCount? */
        /* It's possible that the minimum number of host channels is greater than what the user requested */
        PA_UNLESS( chans >= component->userChannelCount, paInvalidChannelCount );
        /* try to set the sample rate */
        ENSURE_( ioctl( component->fd, SNDCTL_DSP_SPEED, &sr ), paInvalidSampleRate );
        /* reject if there's no sample rate within 1% of the one requested */
        if( (fabs( sampleRate - sr ) / sampleRate) > 0.01 )
        {
            PA_DEBUG(("%s: Wanted %f, closest sample rate was %d\n", __FUNCTION__, sampleRate, sr ));
            PA_ENSURE( paInvalidSampleRate );
        }
        ENSURE_( ioctl( component->fd, streamMode == StreamMode_In ? SNDCTL_DSP_GETISPACE : SNDCTL_DSP_GETOSPACE, &bufInfo ),
                paUnanticipatedHostError );
        component->numBufs = bufInfo.fragstotal;
        /* This needs to be the last ioctl call before the first read/write, according to the OSS programmer's guide */
        ENSURE_( ioctl( component->fd, SNDCTL_DSP_GETBLKSIZE, &bytesPerBuf ), paUnanticipatedHostError );
        component->hostFrames = bytesPerBuf / Pa_GetSampleSize( hostFormat ) / chans;
        component->hostChannelCount = chans;
        component->hostFormat = hostFormat;
    }
    else
    {
        /* Shared device: inherit the master's negotiated settings verbatim */
        component->hostFormat = master->hostFormat;
        component->hostFrames = master->hostFrames;
        component->hostChannelCount = master->hostChannelCount;
        component->numBufs = master->numBufs;
    }
    PA_UNLESS( component->buffer = PaUtil_AllocateMemory( PaOssStreamComponent_BufferSize( component ) ),
            paInsufficientMemory );
error:
    return result;
}
/** Read host frames from the capture device into the component's host buffer.
 *
 * @param frames: On input, the number of frames requested; on output, the
 *        number of frames actually read (a short read yields fewer).
 */
static PaError PaOssStreamComponent_Read( PaOssStreamComponent *component, unsigned long *frames )
{
    PaError result = paNoError;
    size_t len = *frames * PaOssStreamComponent_FrameSize( component );
    ssize_t bytesRead;
    ENSURE_( bytesRead = read( component->fd, component->buffer, len ), paUnanticipatedHostError );
    *frames = bytesRead / PaOssStreamComponent_FrameSize( component );
    /* TODO: Handle condition where number of frames read doesn't equal number of frames requested */
error:
    return result;
}
/** Write host frames from the component's host buffer to the playback device.
 *
 * @param frames: On input, the number of frames to write; on output, the
 *        number of frames actually written (a short write yields fewer).
 */
static PaError PaOssStreamComponent_Write( PaOssStreamComponent *component, unsigned long *frames )
{
    PaError result = paNoError;
    size_t len = *frames * PaOssStreamComponent_FrameSize( component );
    ssize_t bytesWritten;
    ENSURE_( bytesWritten = write( component->fd, component->buffer, len ), paUnanticipatedHostError );
    *frames = bytesWritten / PaOssStreamComponent_FrameSize( component );
    /* TODO: Handle condition where number of frames written doesn't equal number of frames requested */
error:
    return result;
}
/** Configure the stream according to input/output parameters.
 *
 * Aspect StreamChannels: The minimum number of channels supported by the device may exceed that requested by
 * the user, if so we'll record the actual number of host channels and adapt later.
 *
 * @param stream: Stream whose components have already been initialized.
 * @param sampleRate: Requested sample rate in Hz.
 * @param framesPerBuffer: Requested frames per buffer, possibly paFramesPerBufferUnspecified.
 * @param inputLatency: Receives the resulting capture latency in seconds (untouched if no capture).
 * @param outputLatency: Receives the resulting playback latency in seconds (untouched if no playback).
 */
static PaError PaOssStream_Configure( PaOssStream *stream, double sampleRate, unsigned long framesPerBuffer,
        double *inputLatency, double *outputLatency )
{
    PaError result = paNoError;
    int duplex = stream->capture && stream->playback;
    unsigned long framesPerHostBuffer = 0;

    /* We should request full duplex first thing after opening the device */
    if( duplex && stream->sharedDevice )
        ENSURE_( ioctl( stream->capture->fd, SNDCTL_DSP_SETDUPLEX, 0 ), paUnanticipatedHostError );

    if( stream->capture )
    {
        PaOssStreamComponent *component = stream->capture;
        /* Fix: propagate configuration errors instead of ignoring the return value
         * (the playback path below already checks it) */
        PA_ENSURE( PaOssStreamComponent_Configure( component, sampleRate, framesPerBuffer, StreamMode_In,
                    NULL ) );

        assert( component->hostChannelCount > 0 );
        assert( component->hostFrames > 0 );

        /* Latency is the full ring minus the fragment currently being filled */
        *inputLatency = component->hostFrames * (component->numBufs - 1) / sampleRate;
    }
    if( stream->playback )
    {
        PaOssStreamComponent *component = stream->playback, *master = stream->sharedDevice ? stream->capture : NULL;
        PA_ENSURE( PaOssStreamComponent_Configure( component, sampleRate, framesPerBuffer, StreamMode_Out,
                    master ) );
        assert( component->hostChannelCount > 0 );
        assert( component->hostFrames > 0 );

        *outputLatency = component->hostFrames * (component->numBufs - 1) / sampleRate;
    }

    /* The host buffer size is shared between directions; take the smaller fragment */
    if( duplex )
        framesPerHostBuffer = PA_MIN( stream->capture->hostFrames, stream->playback->hostFrames );
    else if( stream->capture )
        framesPerHostBuffer = stream->capture->hostFrames;
    else if( stream->playback )
        framesPerHostBuffer = stream->playback->hostFrames;

    stream->framesPerHostBuffer = framesPerHostBuffer;
    stream->pollTimeout = (int) ceil( 1e6 * framesPerHostBuffer / sampleRate );    /* Period in usecs, rounded up */

    stream->sampleRate = stream->streamRepresentation.streamInfo.sampleRate = sampleRate;

error:
    return result;
}
/* see pa_hostapi.h for a list of validity guarantees made about OpenStream parameters */

/** Open a PA OSS stream.
 *
 * Aspect StreamChannels: The number of channels is specified per direction (in/out), and can differ between the
 * two. However, OSS doesn't support separate configuration spaces for capture and playback so if both
 * directions are the same device we will demand the same number of channels. The number of channels can range
 * from 1 to the maximum supported by the device.
 *
 * Aspect BufferSettings: If framesPerBuffer != paFramesPerBufferUnspecified the number of frames per callback
 * must reflect this, in addition the host latency per device should approximate the corresponding
 * suggestedLatency. Based on these constraints we need to determine a number of frames per host buffer that
 * both capture and playback can agree on (they can be different devices), the buffer processor can adapt
 * between host and user buffer size, but the ratio should preferably be integral.
 */
static PaError OpenStream( struct PaUtilHostApiRepresentation *hostApi,
                           PaStream** s,
                           const PaStreamParameters *inputParameters,
                           const PaStreamParameters *outputParameters,
                           double sampleRate,
                           unsigned long framesPerBuffer,
                           PaStreamFlags streamFlags,
                           PaStreamCallback *streamCallback,
                           void *userData )
{
    PaError result = paNoError;
    PaOSSHostApiRepresentation *ossHostApi = (PaOSSHostApiRepresentation*)hostApi;
    PaOssStream *stream = NULL;
    int inputChannelCount = 0, outputChannelCount = 0;
    PaSampleFormat inputSampleFormat = 0, outputSampleFormat = 0, inputHostFormat = 0, outputHostFormat = 0;
    const PaDeviceInfo *inputDeviceInfo = 0, *outputDeviceInfo = 0;
    int bpInitialized = 0;
    double inLatency, outLatency;
    /* validate platform specific flags */
    if( (streamFlags & paPlatformSpecificFlags) != 0 )
        return paInvalidFlag; /* unexpected platform specific flag */
    if( inputParameters )
    {
        /* unless alternate device specification is supported, reject the use of
            paUseHostApiSpecificDeviceSpecification */
        inputDeviceInfo = hostApi->deviceInfos[inputParameters->device];
        PA_ENSURE( ValidateParameters( inputParameters, inputDeviceInfo, StreamMode_In ) );
        inputChannelCount = inputParameters->channelCount;
        inputSampleFormat = inputParameters->sampleFormat;
    }
    if( outputParameters )
    {
        outputDeviceInfo = hostApi->deviceInfos[outputParameters->device];
        PA_ENSURE( ValidateParameters( outputParameters, outputDeviceInfo, StreamMode_Out ) );
        outputChannelCount = outputParameters->channelCount;
        outputSampleFormat = outputParameters->sampleFormat;
    }
    /* Aspect StreamChannels: We currently demand that number of input and output channels are the same, if the same
     * device is opened for both directions
     */
    if( inputChannelCount > 0 && outputChannelCount > 0 )
    {
        if( inputParameters->device == outputParameters->device )
        {
            if( inputParameters->channelCount != outputParameters->channelCount )
                return paInvalidChannelCount;
        }
    }
    /* allocate and do basic initialization of the stream structure */
    PA_UNLESS( stream = (PaOssStream*)PaUtil_AllocateMemory( sizeof(PaOssStream) ), paInsufficientMemory );
    PA_ENSURE( PaOssStream_Initialize( stream, inputParameters, outputParameters, streamCallback, userData, streamFlags, ossHostApi ) );
    PA_ENSURE( PaOssStream_Configure( stream, sampleRate, framesPerBuffer, &inLatency, &outLatency ) );
    PaUtil_InitializeCpuLoadMeasurer( &stream->cpuLoadMeasurer, sampleRate );
    /* NOTE(review): the reported latencies below read the buffer processor
     * before PaUtil_InitializeBufferProcessor is called further down; since
     * the stream was zero-initialized that term is presumably 0 here —
     * confirm the intended ordering. */
    if( inputParameters )
    {
        inputHostFormat = stream->capture->hostFormat;
        stream->streamRepresentation.streamInfo.inputLatency = inLatency +
            PaUtil_GetBufferProcessorInputLatency( &stream->bufferProcessor ) / sampleRate;
    }
    if( outputParameters )
    {
        outputHostFormat = stream->playback->hostFormat;
        stream->streamRepresentation.streamInfo.outputLatency = outLatency +
            PaUtil_GetBufferProcessorOutputLatency( &stream->bufferProcessor ) / sampleRate;
    }
    /* Initialize buffer processor with fixed host buffer size.
     * Aspect StreamSampleFormat: Here we commit the user and host sample formats, PA infrastructure will
     * convert between the two.
     */
    PA_ENSURE( PaUtil_InitializeBufferProcessor( &stream->bufferProcessor,
              inputChannelCount, inputSampleFormat, inputHostFormat, outputChannelCount, outputSampleFormat,
              outputHostFormat, sampleRate, streamFlags, framesPerBuffer, stream->framesPerHostBuffer,
              paUtilFixedHostBufferSize, streamCallback, userData ) );
    bpInitialized = 1;
    *s = (PaStream*)stream;
    return result;
error:
    if( bpInitialized )
        PaUtil_TerminateBufferProcessor( &stream->bufferProcessor );
    if( stream )
        PaOssStream_Terminate( stream );
    return result;
}
/*! Poll on I/O filedescriptors.

  Poll till we've determined there's data for read or write. In the full-duplex case,
  we don't want to hang around forever waiting for either input or output frames, so
  whenever we have a timed out filedescriptor we check if we're nearing under/overrun
  for the other direction (critical limit set at one buffer). If so, we exit the waiting
  state, and go on with what we got. We align the number of frames on a host buffer
  boundary because it is possible that the buffer size differs for the two directions and
  the host buffer size is a compromise between the two.

  On success *frames holds the number of frames that can be processed now,
  always a multiple of framesPerHostBuffer (possibly 0).
  */
static PaError PaOssStream_WaitForFrames( PaOssStream *stream, unsigned long *frames )
{
    PaError result = paNoError;
    int pollPlayback = 0, pollCapture = 0;
    int captureAvail = INT_MAX, playbackAvail = INT_MAX, commonAvail;
    audio_buf_info bufInfo;
    /* int ofs = 0, nfds = stream->nfds; */
    fd_set readFds, writeFds;
    int nfds = 0;
    struct timeval selectTimeval = {0, 0};
    /* NOTE(review): pollTimeout is in usecs and only assigned to tv_usec;
     * presumably it stays below 1e6 for sane buffer sizes — a larger value
     * would make the timeval invalid on some systems, verify. */
    unsigned long timeout = stream->pollTimeout;    /* In usecs */
    int captureFd = -1, playbackFd = -1;
    assert( stream );
    assert( frames );
    if( stream->capture )
    {
        pollCapture = 1;
        captureFd = stream->capture->fd;
        /* stream->capture->pfd->events = POLLIN; */
    }
    if( stream->playback )
    {
        pollPlayback = 1;
        playbackFd = stream->playback->fd;
        /* stream->playback->pfd->events = POLLOUT; */
    }
    FD_ZERO( &readFds );
    FD_ZERO( &writeFds );
    /* Loop until both requested directions have signalled readiness */
    while( pollPlayback || pollCapture )
    {
        pthread_testcancel();
        /* select may modify the timeout parameter */
        selectTimeval.tv_usec = timeout;
        nfds = 0;
        if( pollCapture )
        {
            FD_SET( captureFd, &readFds );
            nfds = captureFd + 1;
        }
        if( pollPlayback )
        {
            FD_SET( playbackFd, &writeFds );
            nfds = PA_MAX( nfds, playbackFd + 1 );
        }
        ENSURE_( select( nfds, &readFds, &writeFds, NULL, &selectTimeval ), paUnanticipatedHostError );
        /*
        if( poll( stream->pfds + ofs, nfds, stream->pollTimeout ) < 0 )
        {
            ENSURE_( -1, paUnanticipatedHostError );
        }
        */
        pthread_testcancel();
        if( pollCapture )
        {
            if( FD_ISSET( captureFd, &readFds ) )
            {
                FD_CLR( captureFd, &readFds );
                pollCapture = 0;
            }
            /*
            if( stream->capture->pfd->revents & POLLIN )
            {
                --nfds;
                ++ofs;
                pollCapture = 0;
            }
            */
            else if( stream->playback ) /* Timed out, go on with playback? */
            {
                /*PA_DEBUG(( "%s: Trying to poll again for capture frames, pollTimeout: %d\n",
                            __FUNCTION__, stream->pollTimeout ));*/
            }
        }
        if( pollPlayback )
        {
            if( FD_ISSET( playbackFd, &writeFds ) )
            {
                FD_CLR( playbackFd, &writeFds );
                pollPlayback = 0;
            }
            /*
            if( stream->playback->pfd->revents & POLLOUT )
            {
                --nfds;
                pollPlayback = 0;
            }
            */
            else if( stream->capture )  /* Timed out, go on with capture? */
            {
                /*PA_DEBUG(( "%s: Trying to poll again for playback frames, pollTimeout: %d\n\n",
                            __FUNCTION__, stream->pollTimeout ));*/
            }
        }
    }
    /* Query how many whole fragments each direction can transfer right now */
    if( stream->capture )
    {
        ENSURE_( ioctl( captureFd, SNDCTL_DSP_GETISPACE, &bufInfo ), paUnanticipatedHostError );
        captureAvail = bufInfo.fragments * stream->capture->hostFrames;
        if( !captureAvail )
            PA_DEBUG(( "%s: captureAvail: 0\n", __FUNCTION__ ));
        captureAvail = captureAvail == 0 ? INT_MAX : captureAvail;      /* Disregard if zero */
    }
    if( stream->playback )
    {
        ENSURE_( ioctl( playbackFd, SNDCTL_DSP_GETOSPACE, &bufInfo ), paUnanticipatedHostError );
        playbackAvail = bufInfo.fragments * stream->playback->hostFrames;
        if( !playbackAvail )
        {
            PA_DEBUG(( "%s: playbackAvail: 0\n", __FUNCTION__ ));
        }
        playbackAvail = playbackAvail == 0 ? INT_MAX : playbackAvail;      /* Disregard if zero */
    }
    commonAvail = PA_MIN( captureAvail, playbackAvail );
    if( commonAvail == INT_MAX )
        commonAvail = 0;
    /* Align to a whole number of host buffers */
    commonAvail -= commonAvail % stream->framesPerHostBuffer;
    assert( commonAvail != INT_MAX );
    assert( commonAvail >= 0 );
    *frames = commonAvail;
error:
    return result;
}
/** Prepare stream for capture/playback.
 *
 * In order to synchronize capture and playback properly we use the SETTRIGGER command.
 * The playback ring is first primed with silence so that OSS has data to play
 * the instant the trigger fires.  Idempotent: returns immediately if the
 * stream has already been triggered.
 */
static PaError PaOssStream_Prepare( PaOssStream *stream )
{
    PaError result = paNoError;
    int enableBits = 0;
    if( stream->triggered )
        return result;
    if( stream->playback )
    {
        size_t bufSz = PaOssStreamComponent_BufferSize( stream->playback );
        memset( stream->playback->buffer, 0, bufSz );
        /* Looks like we have to turn off blocking before we try this, but if we don't fill the buffer
         * OSS will complain. */
        PA_ENSURE( ModifyBlocking( stream->playback->fd, 0 ) );
        /* Feed silence until the (non-blocking) write is refused, i.e. the ring is full */
        while (1)
        {
            if( write( stream->playback->fd, stream->playback->buffer, bufSz ) < 0 )
                break;
        }
        PA_ENSURE( ModifyBlocking( stream->playback->fd, 1 ) );
    }
    if( stream->sharedDevice )
    {
        /* One fd serves both directions: enable them atomically with one trigger */
        enableBits = PCM_ENABLE_INPUT | PCM_ENABLE_OUTPUT;
        ENSURE_( ioctl( stream->capture->fd, SNDCTL_DSP_SETTRIGGER, &enableBits ), paUnanticipatedHostError );
    }
    else
    {
        if( stream->capture )
        {
            enableBits = PCM_ENABLE_INPUT;
            ENSURE_( ioctl( stream->capture->fd, SNDCTL_DSP_SETTRIGGER, &enableBits ), paUnanticipatedHostError );
        }
        if( stream->playback )
        {
            enableBits = PCM_ENABLE_OUTPUT;
            ENSURE_( ioctl( stream->playback->fd, SNDCTL_DSP_SETTRIGGER, &enableBits ), paUnanticipatedHostError );
        }
    }
    /* Ok, we have triggered the stream */
    stream->triggered = 1;
error:
    return result;
}
/** Stop audio processing
 *
 * @param abort: Currently unused; stop and abort both drain via SNDCTL_DSP_POST.
 */
static PaError PaOssStream_Stop( PaOssStream *stream, int abort )
{
    PaError result = paNoError;
    /* Looks like the only safe way to stop audio without reopening the device is SNDCTL_DSP_POST.
     * Also disable capture/playback till the stream is started again */
    if( stream->capture )
    {
        ENSURE_( ioctl( stream->capture->fd, SNDCTL_DSP_POST, 0 ), paUnanticipatedHostError );
    }
    /* Shared-device streams already posted via the capture fd above */
    if( stream->playback && !stream->sharedDevice )
    {
        ENSURE_( ioctl( stream->playback->fd, SNDCTL_DSP_POST, 0 ), paUnanticipatedHostError );
    }
error:
    return result;
}
/** Clean up after thread exit.
 *
 * Aspect StreamState: If the user has registered a streamFinishedCallback it will be called here
 *
 * Installed with pthread_cleanup_push() by PaOSS_AudioThreadProc, so it runs
 * both on normal thread exit and on cancellation.
 */
static void OnExit( void *data )
{
    PaOssStream *stream = (PaOssStream *) data;
    assert( data );
    PaUtil_ResetCpuLoadMeasurer( &stream->cpuLoadMeasurer );
    PaOssStream_Stop( stream, stream->callbackAbort );
    PA_DEBUG(( "OnExit: Stoppage\n" ));
    /* Eventually notify user all buffers have played */
    if( stream->streamRepresentation.streamFinishedCallback )
        stream->streamRepresentation.streamFinishedCallback( stream->streamRepresentation.userData );
    stream->callbackAbort = 0;      /* Clear state */
    stream->isActive = 0;
}
/** Point the buffer processor at the raw host buffers for one processing round.
 *
 * Registers each direction's interleaved host buffer and the number of frames
 * available for this iteration.  Always returns paNoError.
 */
static PaError SetUpBuffers( PaOssStream *stream, unsigned long framesAvail )
{
    PaOssStreamComponent *capture = stream->capture, *playback = stream->playback;

    if( capture )
    {
        PaUtil_SetInterleavedInputChannels( &stream->bufferProcessor, 0, capture->buffer,
                capture->hostChannelCount );
        PaUtil_SetInputFrameCount( &stream->bufferProcessor, framesAvail );
    }
    if( playback )
    {
        PaUtil_SetInterleavedOutputChannels( &stream->bufferProcessor, 0, playback->buffer,
                playback->hostChannelCount );
        PaUtil_SetOutputFrameCount( &stream->bufferProcessor, framesAvail );
    }

    return paNoError;
}
/** Thread procedure for callback processing.
 *
 * Aspect StreamState: StartStream will wait on this to initiate audio processing, useful in case the
 * callback should be used for buffer priming. When the stream is cancelled a separate function will
 * take care of the transition to the Callback Finished state (the stream isn't considered Stopped
 * before StopStream() or AbortStream() are called).
 */
static void *PaOSS_AudioThreadProc( void *userData )
{
    PaError result = paNoError;
    PaOssStream *stream = (PaOssStream*)userData;
    unsigned long framesAvail, framesProcessed;
    int callbackResult = paContinue;
    int triggered = stream->triggered;  /* See if SNDCTL_DSP_TRIGGER has been issued already */
    int initiateProcessing = triggered;    /* Already triggered? */
    PaStreamCallbackFlags cbFlags = 0;  /* We might want to keep state across iterations */
    PaStreamCallbackTimeInfo timeInfo = {0,0,0}; /* TODO: IMPLEMENT ME */
    /*
    #if ( SOUND_VERSION > 0x030904 )
        audio_errinfo errinfo;
    #endif
    */
    assert( stream );
    /* OnExit runs on both normal exit and cancellation */
    pthread_cleanup_push( &OnExit, stream );	/* Execute OnExit when exiting */
    /* The first time the stream is started we use SNDCTL_DSP_TRIGGER to accurately start capture and
     * playback in sync, when the stream is restarted after being stopped we simply start by reading/
     * writing.
     */
    PA_ENSURE( PaOssStream_Prepare( stream ) );
    /* If we are to initiate processing implicitly by reading/writing data, we start off in blocking mode */
    if( initiateProcessing )
    {
        /* Make sure devices are in blocking mode */
        if( stream->capture )
            ModifyBlocking( stream->capture->fd, 1 );
        if( stream->playback )
            ModifyBlocking( stream->playback->fd, 1 );
    }
    while( 1 )
    {
        pthread_testcancel();
        if( stream->callbackStop && callbackResult == paContinue )
        {
            PA_DEBUG(( "Setting callbackResult to paComplete\n" ));
            callbackResult = paComplete;
        }
        /* Aspect StreamState: Because of the messy OSS scheme we can't explicitly trigger device start unless
         * the stream has been recently started, we will have to go right ahead and read/write in blocking
         * fashion to trigger operation. Therefore we begin with processing one host buffer before we switch
         * to non-blocking mode.
         */
        if( !initiateProcessing )
        {
            PA_ENSURE( PaOssStream_WaitForFrames( stream, &framesAvail ) );  /* Wait on available frames */
            assert( framesAvail % stream->framesPerHostBuffer == 0 );
        }
        else
        {
            /* First iteration after (re)start: process exactly one host buffer in blocking mode */
            framesAvail = stream->framesPerHostBuffer;
        }
        while( framesAvail > 0 )
        {
            unsigned long frames = framesAvail;
            pthread_testcancel();
            PaUtil_BeginCpuLoadMeasurement( &stream->cpuLoadMeasurer );
            /* Read data */
            if ( stream->capture )
            {
                PA_ENSURE( PaOssStreamComponent_Read( stream->capture, &frames ) );
                assert( frames == framesAvail );
            }
    #if ( SOUND_VERSION >= 0x030904 )
            /*
               Check with OSS to see if there have been any under/overruns
               since last time we checked.
               */
            /*
            if( ioctl( stream->deviceHandle, SNDCTL_DSP_GETERROR, &errinfo ) >= 0 )
            {
                if( errinfo.play_underruns )
                    cbFlags |= paOutputUnderflow ;
                if( errinfo.record_underruns )
                    cbFlags |= paInputUnderflow ;
            }
            else
                PA_DEBUG(( "SNDCTL_DSP_GETERROR command failed: %s\n", strerror( errno ) ));
                */
    #endif
            PaUtil_BeginBufferProcessing( &stream->bufferProcessor, &timeInfo,
                    cbFlags );
            cbFlags = 0;
            PA_ENSURE( SetUpBuffers( stream, framesAvail ) );
            framesProcessed = PaUtil_EndBufferProcessing( &stream->bufferProcessor,
                    &callbackResult );
            assert( framesProcessed == framesAvail );
            PaUtil_EndCpuLoadMeasurement( &stream->cpuLoadMeasurer, framesProcessed );
            if ( stream->playback )
            {
                frames = framesAvail;
                PA_ENSURE( PaOssStreamComponent_Write( stream->playback, &frames ) );
                assert( frames == framesAvail );
                /* TODO: handle bytesWritten != bytesRequested (slippage?) */
            }
            framesAvail -= framesProcessed;
            stream->framesProcessed += framesProcessed;
            if( callbackResult != paContinue )
                break;
        }
        if( initiateProcessing || !triggered )
        {
            /* Non-blocking */
            if( stream->capture )
                PA_ENSURE( ModifyBlocking( stream->capture->fd, 0 ) );
            if( stream->playback && !stream->sharedDevice )
                PA_ENSURE( ModifyBlocking( stream->playback->fd, 0 ) );
            initiateProcessing = 0;
            /* Let StartStream know processing has commenced */
            sem_post( &stream->semaphore );
        }
        if( callbackResult != paContinue )
        {
            stream->callbackAbort = callbackResult == paAbort;
            /* On paComplete, keep draining until the buffer processor's output is empty */
            if( stream->callbackAbort || PaUtil_IsBufferProcessorOutputEmpty( &stream->bufferProcessor ) )
                break;
        }
    }
    pthread_cleanup_pop( 1 );
error:
    pthread_exit( NULL );
}
/** Close the stream.
 *
 * Tears down the buffer processor and releases every resource the stream holds.
 */
static PaError CloseStream( PaStream* s )
{
    PaOssStream *stream = (PaOssStream*)s;

    assert( stream );

    PaUtil_TerminateBufferProcessor( &stream->bufferProcessor );
    PaOssStream_Terminate( stream );

    return paNoError;
}
/** Start the stream.
 *
 * Aspect StreamState: After returning, the stream shall be in the Active state, implying that an eventual
 * callback will be repeatedly called in a separate thread. If a separate thread is started this function
 * will block untill it has started processing audio, otherwise audio processing is started directly.
 */
static PaError StartStream( PaStream *s )
{
    PaError result = paNoError;
    PaOssStream *stream = (PaOssStream*)s;
    stream->isActive = 1;
    stream->isStopped = 0;
    stream->lastPosPtr = 0;
    stream->lastStreamBytes = 0;
    stream->framesProcessed = 0;
    /* only use the thread for callback streams */
    if( stream->bufferProcessor.streamCallback )
    {
        PA_ENSURE( PaUtil_StartThreading( &stream->threading, &PaOSS_AudioThreadProc, stream ) );
        /* Block until the audio thread signals that processing has begun */
        sem_wait( &stream->semaphore );
    }
    else
        /* Blocking streams are merely triggered; I/O happens in Read/WriteStream */
        PA_ENSURE( PaOssStream_Prepare( stream ) );
error:
    return result;
}
/** Common implementation behind StopStream and AbortStream.
 *
 * For callback streams the audio thread is asked to finish (via the
 * callbackStop/callbackAbort flags) and then joined/cancelled; for blocking
 * streams the device is simply drained with PaOssStream_Stop.
 *
 * @param abort: Non-zero to discard pending audio (abort), zero to drain (stop).
 */
static PaError RealStop( PaOssStream *stream, int abort )
{
    PaError result = paNoError;
    if( stream->callbackMode )
    {
        if( abort )
            stream->callbackAbort = 1;
        else
            stream->callbackStop = 1;
        /* Wait for the thread only when stopping gracefully (second arg) */
        PA_ENSURE( PaUtil_CancelThreading( &stream->threading, !abort, NULL ) );
        stream->callbackStop = stream->callbackAbort = 0;
    }
    else
        PA_ENSURE( PaOssStream_Stop( stream, abort ) );
    stream->isStopped = 1;
error:
    return result;
}
/** Stop the stream.
 *
 * Aspect StreamState: This will cause the stream to transition to the Stopped state, playing all enqueued
 * buffers.
 */
static PaError StopStream( PaStream *s )
{
    PaOssStream *stream = (PaOssStream *)s;
    return RealStop( stream, 0 );
}
/** Abort the stream.
 *
 * Aspect StreamState: This will cause the stream to transition to the Stopped state, discarding all enqueued
 * buffers. Note that the buffers are not currently correctly discarded, this is difficult without closing
 * the OSS device.
 */
static PaError AbortStream( PaStream *s )
{
    PaOssStream *stream = (PaOssStream *)s;
    return RealStop( stream, 1 );
}
/** Is the stream in the Stopped state.
 *
 */
static PaError IsStreamStopped( PaStream *s )
{
    return ((PaOssStream *)s)->isStopped;
}
/** Is the stream in the Active state.
 *
 */
static PaError IsStreamActive( PaStream *s )
{
    return ((PaOssStream *)s)->isActive;
}
/** Return the current stream time in seconds.
 *
 * Queries the device's byte pointer (playback preferred, else capture) and
 * converts the bytes transferred since the last bookkeeping point to seconds.
 * Falls back to a coarse frames-processed estimate if the ioctl fails.
 *
 * Fix: the intermediate results are kept in double precision; the previous
 * (float) casts needlessly truncated the PaTime (double) result for
 * long-running streams.
 */
static PaTime GetStreamTime( PaStream *s )
{
    PaOssStream *stream = (PaOssStream*)s;
    count_info info;
    int delta;

    if( stream->playback ) {
        if( ioctl( stream->playback->fd, SNDCTL_DSP_GETOPTR, &info) == 0 ) {
            delta = ( info.bytes - stream->lastPosPtr ) /* & 0x000FFFFF*/;
            return (double)(stream->lastStreamBytes + delta) / PaOssStreamComponent_FrameSize( stream->playback ) / stream->sampleRate;
        }
    }
    else {
        if (ioctl( stream->capture->fd, SNDCTL_DSP_GETIPTR, &info) == 0) {
            delta = (info.bytes - stream->lastPosPtr) /*& 0x000FFFFF*/;
            return (double)(stream->lastStreamBytes + delta) / PaOssStreamComponent_FrameSize( stream->capture ) / stream->sampleRate;
        }
    }

    /* the ioctl failed, but we can still give a coarse estimate */
    return stream->framesProcessed / stream->sampleRate;
}
/** Return the measured CPU load for this stream's processing. */
static double GetStreamCpuLoad( PaStream* s )
{
    return PaUtil_GetCpuLoad( &((PaOssStream *)s)->cpuLoadMeasurer );
}
/*
    As separate stream interfaces are used for blocking and callback
    streams, the following functions can be guaranteed to only be called
    for blocking streams.
*/
/** Blocking read of 'frames' frames from the capture device into 'buffer'.
 *
 * Reads one host-buffer-sized chunk at a time from the OSS fd into the
 * component's staging buffer, then lets the buffer processor convert/copy
 * it into the user's buffer (handling format and interleaving).
 *
 * Returns paNoError on success, paUnanticipatedHostError if read() returns
 * fewer bytes than requested.
 */
static PaError ReadStream( PaStream* s,
                           void *buffer,
                           unsigned long frames )
{
    PaOssStream *stream = (PaOssStream*)s;
    int bytesRequested, bytesRead;
    unsigned long framesRequested;
    void *userBuffer;
    /* If user input is non-interleaved, PaUtil_CopyInput will manipulate the channel pointers,
     * so we copy the user provided pointers */
    if( stream->bufferProcessor.userInputIsInterleaved )
        userBuffer = buffer;
    else /* Copy channels into local array */
    {
        /* Work on a private copy so the caller's pointer array is not modified. */
        userBuffer = stream->capture->userBuffers;
        memcpy( (void *)userBuffer, buffer, sizeof (void *) * stream->capture->userChannelCount );
    }
    while( frames )
    {
        /* Never request more than one host buffer per read. */
        framesRequested = PA_MIN( frames, stream->capture->hostFrames );
        bytesRequested = framesRequested * PaOssStreamComponent_FrameSize( stream->capture );
        bytesRead = read( stream->capture->fd, stream->capture->buffer, bytesRequested );
        /* A short read is treated as a host error (blocking fd should deliver all). */
        if ( bytesRequested != bytesRead )
            return paUnanticipatedHostError;
        /* Hand the raw chunk to the buffer processor for conversion into user format. */
        PaUtil_SetInputFrameCount( &stream->bufferProcessor, stream->capture->hostFrames );
        PaUtil_SetInterleavedInputChannels( &stream->bufferProcessor, 0, stream->capture->buffer, stream->capture->hostChannelCount );
        PaUtil_CopyInput( &stream->bufferProcessor, &userBuffer, framesRequested );
        frames -= framesRequested;
    }
    return paNoError;
}
/** Blocking write of 'frames' frames from 'buffer' to the playback device.
 *
 * Converts the user's data into the host format via the buffer processor
 * (one host buffer at a time), then writes each converted chunk to the OSS fd.
 *
 * Returns paNoError on success, paUnanticipatedHostError if write() accepts
 * fewer bytes than requested.
 */
static PaError WriteStream( PaStream *s, const void *buffer, unsigned long frames )
{
    PaOssStream *stream = (PaOssStream*)s;
    int bytesRequested, bytesWritten;
    unsigned long framesConverted;
    const void *userBuffer;
    /* If user output is non-interleaved, PaUtil_CopyOutput will manipulate the channel pointers,
     * so we copy the user provided pointers */
    if( stream->bufferProcessor.userOutputIsInterleaved )
        userBuffer = buffer;
    else
    {
        /* Copy channels into local array */
        userBuffer = stream->playback->userBuffers;
        memcpy( (void *)userBuffer, buffer, sizeof (void *) * stream->playback->userChannelCount );
    }
    while( frames )
    {
        /* Convert up to one host buffer of user data into the staging buffer. */
        PaUtil_SetOutputFrameCount( &stream->bufferProcessor, stream->playback->hostFrames );
        PaUtil_SetInterleavedOutputChannels( &stream->bufferProcessor, 0, stream->playback->buffer, stream->playback->hostChannelCount );
        framesConverted = PaUtil_CopyOutput( &stream->bufferProcessor, &userBuffer, frames );
        frames -= framesConverted;
        /* Push the converted chunk to the device; a short write is a host error. */
        bytesRequested = framesConverted * PaOssStreamComponent_FrameSize( stream->playback );
        bytesWritten = write( stream->playback->fd, stream->playback->buffer, bytesRequested );
        if ( bytesRequested != bytesWritten )
            return paUnanticipatedHostError;
    }
    return paNoError;
}
/** Number of frames that can be read from the capture device without blocking. */
static signed long GetStreamReadAvailable( PaStream* s )
{
    PaOssStream *ossStream = (PaOssStream *)s;
    audio_buf_info spaceInfo;

    /* SNDCTL_DSP_GETISPACE reports how many complete input fragments are ready. */
    if( ioctl( ossStream->capture->fd, SNDCTL_DSP_GETISPACE, &spaceInfo ) >= 0 )
        return spaceInfo.fragments * ossStream->capture->hostFrames;

    return paUnanticipatedHostError;
}
/* TODO: Compute number of allocated bytes somewhere else, can we use ODELAY with capture */
/** Number of frames that can be written to the playback device without blocking. */
static signed long GetStreamWriteAvailable( PaStream* s )
{
    PaOssStream *ossStream = (PaOssStream *)s;
    int queuedBytes = 0;

    /* SNDCTL_DSP_GETODELAY reports bytes queued in the device but not yet played. */
    if( ioctl( ossStream->playback->fd, SNDCTL_DSP_GETODELAY, &queuedBytes ) < 0 )
        return paUnanticipatedHostError;

    return (PaOssStreamComponent_BufferSize( ossStream->playback ) - queuedBytes)
            / PaOssStreamComponent_FrameSize( ossStream->playback );
}
| {
"content_hash": "8dba1f0f39ebdf9f043dad3f8bb1a4ae",
"timestamp": "",
"source": "github",
"line_count": 1889,
"max_line_length": 146,
"avg_line_length": 34.63366860772896,
"alnum_prop": 0.6456139278235483,
"repo_name": "pikoro/sharpsdr",
"id": "169a640acc271931937cbef545fa0f5b88d39530",
"size": "67071",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PortAudio/tags/v19-devel-pre-restructure/pa_unix_oss/pa_unix_oss.c",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11088"
},
{
"name": "C",
"bytes": "30751426"
},
{
"name": "C#",
"bytes": "760314"
},
{
"name": "C++",
"bytes": "6618685"
},
{
"name": "CMake",
"bytes": "61471"
},
{
"name": "HTML",
"bytes": "604536"
},
{
"name": "Java",
"bytes": "173104"
},
{
"name": "Makefile",
"bytes": "24601"
},
{
"name": "Python",
"bytes": "215120"
},
{
"name": "Shell",
"bytes": "9763913"
}
],
"symlink_target": ""
} |
#include "_eth_include.h"
#ifdef _ETH // the Ethernet device is present
/****************************************************************************
 * Function:        _EthRxAckBuffer
 *
 * PreCondition:    EthRxSetBufferSize, EthRxBuffersAppend, EthRxGetPacket should have been called.
 *
 * Input:           pBuff - buffer/packet to be acknowledged or NULL
 *                  ackFnc - function to be called for the acknowledged buffers or NULL
 *                  fParam - argument to be used in ackFnc callback
 *
 * Output:          ETH_RES_OK - success
 *                  ETH_RES_PACKET_QUEUED - there are packets in the receiving queue
 *                  ETH_RES_NO_PACKET - no packets available in the receiving queue
 *
 * Side Effects:    None
 *
 * Overview:        This function acknowledges a received buffer/packet.
 *                  The supplied packet has to have been previously received otherwise the call will fail or the packet will be discarded.
 *                  When pBuff==NULL, all currently received packets will be acknowledged.
 *                  The ackFnc, if !NULL, will be called for each buffer within the packet in turn.
 *
 * Note:            - Any received packet has to be acknowledged, otherwise the Eth API will run out of descriptors.
 *                  - pBuff must be the pointer to the first buffer in the packet, if the packet spans multiple buffers.
 *                  - ackFnc is just a helper that allows the application to call an acknowledge function for each received buffer in turn.
 *****************************************************************************/
static eEthRes _EthRxAckBuffer(const void* pBuff, pEthBuffAck ackFnc, void* fParam)
{
    eEthRes res;
    sEthDNode* pEDcpt;
    /* Descriptors acknowledged by _EthAckPacket land here first... */
    sEthDcptList ackList={0, 0};
    /* ...then sticky ones are re-queued to the hardware via this list. */
    sEthDcptList stickyList={0, 0};
    /* Move completed RX descriptors from the busy list to ackList, invoking the user callback. */
    res=_EthAckPacket(pBuff, &_EthRxBusyList, &ackList, ackFnc, fParam);
    while((pEDcpt=SlRemoveHead(&ackList)))
    {
        if(pEDcpt->hwDcpt.hdr.sticky)
        {
            /* Sticky buffer: clear the packet-boundary flags and hand the
             * descriptor back to the hardware so it can be reused for RX. */
            pEDcpt->hwDcpt.hdr.SOP=pEDcpt->hwDcpt.hdr.EOP=pEDcpt->hwDcpt.hdr.rx_wack=0;
            pEDcpt->hwDcpt.hdr.EOWN=1;  // hardware owns the descriptor again
            SlAddTail(&stickyList, pEDcpt);
        }
        else
        {
            /* Non-sticky: descriptor returns to the software free pool. */
            SlAddTail(&_EthRxFreeList, pEDcpt);
            if(!pEDcpt->hwDcpt.hdr.rx_nack)
            {
                /* Decrement the controller's RX buffer count for each consumed buffer. */
                ETHCON1SET=_ETHCON1_BUFCDEC_MASK;    // acknowledge the hardware;
            }
        }
    }
    if(!SlIsEmpty(&stickyList))
    {
        _EthAppendBusyList(&_EthRxBusyList, &stickyList, 1);    // append the descriptors that have valid buffers
    }
    return res;
}
/****************************************************************************
 * Function:        EthRxAcknowledgeBuffer
 *
 * PreCondition:    EthRxSetBufferSize, EthRxBuffersAppend, EthRxGetPacket should have been called.
 *
 * Input:           pBuff  - first buffer of the packet to acknowledge, or NULL
 *                           to acknowledge every currently received packet
 *                  ackFnc - optional callback invoked for each buffer in the packet
 *                  fParam - opaque argument forwarded to ackFnc
 *
 * Output:          ETH_RES_OK - success
 *                  ETH_RES_PACKET_QUEUED - there are packets in the receiving queue
 *                  ETH_RES_NO_PACKET - no packets available in the receiving queue
 *
 * Side Effects:    None
 *
 * Overview:        Public entry point for acknowledging a received buffer/packet.
 *                  The packet must have been previously received or the call
 *                  fails/the packet is discarded.
 *
 * Note:            Every received packet has to be acknowledged, otherwise the
 *                  Eth API will run out of descriptors. pBuff must point to the
 *                  packet's first buffer when it spans multiple buffers.
 *****************************************************************************/
eEthRes EthRxAcknowledgeBuffer(const void* pBuff, pEthBuffAck ackFnc, void* fParam)
{
    /* Thin wrapper over the shared RX acknowledge implementation. */
    eEthRes ackRes = _EthRxAckBuffer(pBuff, ackFnc, fParam);
    return ackRes;
}
/****************************************************************************
 * Function:        EthTxAcknowledgeBuffer
 *
 * PreCondition:    EthTxSendPacket should have been called.
 *
 * Input:           pBuff  - first buffer of the packet to acknowledge, or NULL
 *                           to acknowledge every completed transmission
 *                  ackFnc - optional callback invoked for each buffer in the packet
 *                  fParam - opaque argument forwarded to ackFnc
 *
 * Output:          ETH_RES_OK - success
 *                  ETH_RES_PACKET_QUEUED - there are packets scheduled to be transmitted
 *                  ETH_RES_NO_PACKET - no packets scheduled for transmit
 *
 * Side Effects:    None
 *
 * Overview:        Acknowledges a transmitted buffer/packet. The transmission
 *                  must have completed or the call fails.
 *
 * Note:            Every transmitted packet has to be acknowledged, otherwise
 *                  the Eth API will run out of transmission descriptors. pBuff
 *                  must point to the packet's first buffer when it spans
 *                  multiple buffers. ackFnc lets the application release the
 *                  buffers without tracking them itself.
 *****************************************************************************/
eEthRes EthTxAcknowledgeBuffer(const void* pBuff, pEthBuffAck ackFnc, void* fParam)
{
    /* TX acknowledge: recycle completed descriptors from the busy to the free list. */
    eEthRes ackRes = _EthAckPacket(pBuff, &_EthTxBusyList, &_EthTxFreeList, ackFnc, fParam);
    return ackRes;
}
/****************************************************************************
 * Function:        _EthAckPacket
 *
 * PreCondition:    None
 *
 * Input:           pPkt     - buffer/packet to be acknowledged or NULL
 *                  pRemList - list to look for done packets and to remove the packets from
 *                  pAddList - list were to add the removed packets
 *                  ackFnc   - function to be called for each acknowledged buffer in turn
 *                  fParam   - function argument
 *
 * Output:          ETH_RES_OK - success
 *                  ETH_RES_PACKET_QUEUED - there are packets queued
 *                  ETH_RES_NO_PACKET - no packets available in the queue
 *
 * Side Effects:    None
 *
 * Overview:        This function acknowledges a packet.
 *                  The supplied packet has to have been completed otherwise the call will fail.
 *                  When pPkt==NULL, all packets with EOWN==0 will be acknowledged.
 *
 * Note:            None
 *****************************************************************************/
eEthRes _EthAckPacket(const void* pPkt, sEthDcptList* pRemList, sEthDcptList* pAddList, pEthBuffAck ackFnc, void* fParam)
{
    sEthDNode *pEDcpt;
    sEthDNode *prev, *next;
    int nAcks;        /* number of whole packets acknowledged */
    int pktFound;     /* at least one matching SOP descriptor was seen */
    int buffIx;       /* per-packet buffer index passed to ackFnc */
    prev=next=0;
    nAcks=pktFound=0;
    /* Walk the busy list looking for packet starts (SOP descriptors). */
    for(pEDcpt=pRemList->head; pEDcpt!=0; pEDcpt=next)
    {
        /* Match by physical buffer address; pPkt==0 matches any packet. */
        if(pEDcpt->hwDcpt.hdr.SOP && (pPkt==0 || pEDcpt->hwDcpt.pEDBuff==(unsigned char*)KVA_TO_PA(pPkt)))
        {   // found the beg of a packet
            pktFound=1;
            if(pEDcpt->hwDcpt.hdr.EOWN)
            {
                /* Hardware still owns the SOP descriptor: packet not complete yet. */
                break;      // hw not done with it
            }
            next=pEDcpt;
            buffIx=0;
            /* Collect every descriptor of the packet, from SOP through EOP. */
            do
            {
                pEDcpt=next;
                next=pEDcpt->next;
                /* Spin until hw releases the descriptor; with SOP EOWN==0 this
                 * "shouldn't happen" for the rest of the packet. */
                while(pEDcpt->hwDcpt.hdr.EOWN);     // shouldn't happen
                SlAddTail(pAddList, pEDcpt);        // ack this node
                if(ackFnc)
                {
                    void* pBuff;
                    /* Translate the physical buffer address back to the proper
                     * virtual segment (KSEG0 vs KSEG1) for the callback. */
                    pBuff=(pEDcpt->hwDcpt.hdr.kv0?PA_TO_KVA0((int)pEDcpt->hwDcpt.pEDBuff):PA_TO_KVA1((int)pEDcpt->hwDcpt.pEDBuff));
                    (*ackFnc)(pBuff, buffIx++, fParam);     // call user's acknowledge
                }
            }while(!pEDcpt->hwDcpt.hdr.EOP);
            nAcks++;
            /* Re-link the removal list around the span just detached. */
            if(prev)
            {
                prev->next=next;
                // prev->next_ed shouldn't matter here!
            }
            else
            {
                pRemList->head=next;
            }
            if(pPkt)
            {   // done, just one packet ack-ed
                break;  // done
            }
        }
        else
        {
            /* Not a matching SOP: keep walking. */
            prev=pEDcpt;
            next=pEDcpt->next;
        }
    }
    return nAcks?ETH_RES_OK:(pktFound?ETH_RES_PACKET_QUEUED:ETH_RES_NO_PACKET);
}
/********************
* legacy functions
***********************/
/****************************************************************************
 * Function:        EthRxAckBuffer (legacy)
 *
 * PreCondition:    EthRxSetBufferSize, EthRxBuffersAppend, EthRxGetPacket should have been called.
 *
 * Input:           pBuff  - first buffer of the packet to acknowledge, or NULL
 *                           to acknowledge every currently received packet
 *                  ackFnc - optional legacy callback invoked per buffer
 *
 * Output:          ETH_RES_OK - success
 *                  ETH_RES_PACKET_QUEUED - there are packets in the receiving queue
 *                  ETH_RES_NO_PACKET - no packets available in the receiving queue
 *
 * Side Effects:    None
 *
 * Overview:        Legacy RX acknowledge entry point; identical to
 *                  EthRxAcknowledgeBuffer but takes the old callback type and
 *                  no user parameter.
 *
 * Note:            Every received packet has to be acknowledged, otherwise the
 *                  Eth API will run out of descriptors.
 *****************************************************************************/
eEthRes EthRxAckBuffer(const void* pBuff, pEthPktAckF ackFnc)
{
    /* Adapt the legacy callback signature; no user parameter is supplied. */
    return _EthRxAckBuffer(pBuff, (pEthBuffAck)ackFnc, 0);
}
/****************************************************************************
 * Function:        EthTxAckBuffer (legacy)
 *
 * PreCondition:    EthTxSendPacket should have been called.
 *
 * Input:           pBuff  - first buffer of the packet to acknowledge, or NULL
 *                           to acknowledge every completed transmission
 *                  ackFnc - optional legacy callback invoked per buffer
 *
 * Output:          ETH_RES_OK - success
 *                  ETH_RES_PACKET_QUEUED - there are packets scheduled to be transmitted
 *                  ETH_RES_NO_PACKET - no packets scheduled for transmit
 *
 * Side Effects:    None
 *
 * Overview:        Legacy TX acknowledge entry point; identical to
 *                  EthTxAcknowledgeBuffer but takes the old callback type and
 *                  no user parameter.
 *
 * Note:            Every transmitted packet has to be acknowledged, otherwise
 *                  the Eth API will run out of transmission descriptors.
 *****************************************************************************/
eEthRes EthTxAckBuffer(const void* pBuff, pEthPktAckF ackFnc)
{
    /* Adapt the legacy callback signature; no user parameter is supplied. */
    eEthRes ackRes = _EthAckPacket(pBuff, &_EthTxBusyList, &_EthTxFreeList, (pEthBuffAck)ackFnc, 0);
    return ackRes;
}
#endif // _ETH
| {
"content_hash": "a3fafbd51342290190e4d2d9aa1b782a",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 139,
"avg_line_length": 41.83737024221453,
"alnum_prop": 0.5949880076089653,
"repo_name": "sergev/vak-opensource",
"id": "754fa01fda37f72e138c4863419ad33c8121c51c",
"size": "12091",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "microcontrollers/microchip-eth/ack_packet.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "1C Enterprise",
"bytes": "389"
},
{
"name": "APL",
"bytes": "177"
},
{
"name": "Ada",
"bytes": "4425"
},
{
"name": "Assembly",
"bytes": "2089449"
},
{
"name": "Awk",
"bytes": "221"
},
{
"name": "BASIC",
"bytes": "2604"
},
{
"name": "Batchfile",
"bytes": "3032"
},
{
"name": "C",
"bytes": "20925647"
},
{
"name": "C#",
"bytes": "986"
},
{
"name": "C++",
"bytes": "2614423"
},
{
"name": "CMake",
"bytes": "6854"
},
{
"name": "COBOL",
"bytes": "476314"
},
{
"name": "Common Lisp",
"bytes": "10559"
},
{
"name": "D",
"bytes": "15701"
},
{
"name": "DIGITAL Command Language",
"bytes": "1965"
},
{
"name": "Dart",
"bytes": "1266"
},
{
"name": "Erlang",
"bytes": "377"
},
{
"name": "F#",
"bytes": "4942"
},
{
"name": "Forth",
"bytes": "18207"
},
{
"name": "Fortran",
"bytes": "292582"
},
{
"name": "GDB",
"bytes": "809"
},
{
"name": "Gnuplot",
"bytes": "8810"
},
{
"name": "Go",
"bytes": "84054"
},
{
"name": "HTML",
"bytes": "257212"
},
{
"name": "Haskell",
"bytes": "14698"
},
{
"name": "Java",
"bytes": "246738"
},
{
"name": "JavaScript",
"bytes": "24526"
},
{
"name": "Julia",
"bytes": "2285"
},
{
"name": "Kotlin",
"bytes": "1114"
},
{
"name": "LLVM",
"bytes": "2420"
},
{
"name": "Lex",
"bytes": "24747"
},
{
"name": "Limbo",
"bytes": "27127"
},
{
"name": "M",
"bytes": "441"
},
{
"name": "MATLAB",
"bytes": "6132"
},
{
"name": "Makefile",
"bytes": "480896"
},
{
"name": "Meson",
"bytes": "701"
},
{
"name": "Modula-2",
"bytes": "112"
},
{
"name": "Module Management System",
"bytes": "8062"
},
{
"name": "Nim",
"bytes": "399"
},
{
"name": "Objective-C",
"bytes": "10342"
},
{
"name": "PHP",
"bytes": "36349"
},
{
"name": "Pascal",
"bytes": "92870"
},
{
"name": "Pawn",
"bytes": "501"
},
{
"name": "Perl",
"bytes": "126464"
},
{
"name": "Processing",
"bytes": "29556"
},
{
"name": "Python",
"bytes": "145601"
},
{
"name": "QMake",
"bytes": "1659"
},
{
"name": "RPC",
"bytes": "2644"
},
{
"name": "Roff",
"bytes": "73949"
},
{
"name": "Ruby",
"bytes": "315"
},
{
"name": "Rust",
"bytes": "18231"
},
{
"name": "SWIG",
"bytes": "3784"
},
{
"name": "Scala",
"bytes": "870"
},
{
"name": "Scheme",
"bytes": "47093"
},
{
"name": "Scilab",
"bytes": "34671"
},
{
"name": "Shell",
"bytes": "41264"
},
{
"name": "Swift",
"bytes": "1486"
},
{
"name": "SystemVerilog",
"bytes": "22907"
},
{
"name": "Tcl",
"bytes": "57048"
},
{
"name": "TypeScript",
"bytes": "57"
},
{
"name": "V",
"bytes": "2495"
},
{
"name": "VBA",
"bytes": "180"
},
{
"name": "VHDL",
"bytes": "469591"
},
{
"name": "Verilog",
"bytes": "1610969"
},
{
"name": "XProc",
"bytes": "87892"
},
{
"name": "Yacc",
"bytes": "170392"
},
{
"name": "sed",
"bytes": "252"
}
],
"symlink_target": ""
} |
/**
 * Locale-aware number formatting/parsing library for ui:inputNumber.
 * Relies on the Aura framework global $A for locale data
 * ($Locale.decimal, $Locale.grouping, $Locale.currency) and number formats.
 */
function lib() { // eslint-disable-line no-unused-vars
    // Exponents for the shorthand magnitude suffixes: 1k = 10^3 ... 1t = 10^12.
    var exponentByPrefix = {
        'k': 3,
        'm': 6,
        'b': 9,
        't': 12
    };
    // True when the string is a decimal with no leading zero, e.g. ".5" or "-.25K".
    function isNoZeroLeadingNumber(string) {
        var decimalSeparator = $A.get("$Locale.decimal");
        var reg = new RegExp('(^\\s*(\\+|\\-)?\\s*)\\' + decimalSeparator + '\\d*(K|B|M|T)?$');
        return reg.test(string);
    }
    // Turn ".5" into "0.5" (using the locale's decimal separator) so Number() can parse it.
    function injectZeroBeforeDecimalSeparator (string) {
        var decimalSeparator = $A.get("$Locale.decimal");
        var numberParts = string.split(decimalSeparator);
        return numberParts[0] + '0' + decimalSeparator + numberParts[1];
    }
    return {
        /**
         * Format a numeric value with the given formatter pattern.
         * Returns '' for null/undefined/NaN input.
         */
        formatNumber: function (number, formatter) {
            var numberFormat = this.getNumberFormat(formatter);
            if (!$A.util.isUndefinedOrNull(number) && !isNaN(number)) {
                return numberFormat.format(number);
            }
            return '';
        },
        /**
         * Resolve a format pattern string to a NumberFormat instance,
         * falling back to the locale default when the pattern is missing or invalid.
         */
        getNumberFormat: function (formatter) {
            if (typeof formatter === 'string') {
                try {
                    return $A.localizationService.getNumberFormat(formatter);
                } catch (e) {
                    // invalid number format error
                    // use default instead and show a warning on console
                    return $A.localizationService.getDefaultNumberFormat();
                }
            }
            return $A.localizationService.getDefaultNumberFormat();
        },
        /**
         * Parse a locale-formatted string (possibly with currency symbol,
         * grouping separators, sign/parentheses, or K/M/B/T shortcut) back
         * into a Number. Numbers are returned unchanged.
         */
        unFormatNumber: function (string ) {
            if (this.isNumber(string)) {
                return string;
            }
            var decimalSeparator = $A.get("$Locale.decimal");
            var currencySymbol = $A.get("$Locale.currency");
            // Keep the original around: the shortcut suffix is detected on it below.
            var stringOriginal = string;
            string = string.replace(currencySymbol, '');
            if (isNoZeroLeadingNumber(string)) {
                string = injectZeroBeforeDecimalSeparator(string);
            }
            // Normalize to '.' as decimal separator before numeric parsing.
            if (decimalSeparator !== '.') {
                string = string.replace(/\./g, '').replace(decimalSeparator, '.');
            }
            var numberOnlyPart = string.replace(/[^0-9\.]+/g, '');
            // Sign: each '-' and each matched '(' ')' pair flips the sign (accounting style).
            var value =
                (((string.split('-').length + Math.min(string.split('(').length - 1, string.split(')').length - 1)) % 2) ? 1 : -1) *
                Number(numberOnlyPart);
            // find if contains kmtb.
            var exponentKey = Object.keys(exponentByPrefix).find(function(abbreviation) {
                // currencySymbol could be null or undefined or ''
                var currencyExp = (typeof currencySymbol === "string" && currencySymbol.length > 0) ? '|(\\' + currencySymbol + ')' : '';
                var regExp = new RegExp('[^a-zA-Z]' + abbreviation + '(?:\\)' + currencyExp + ' ?(?:\\))?)?$', 'i');
                return stringOriginal.match(regExp);
            });
            if (exponentKey) {
                var exponent = exponentByPrefix[exponentKey];
                // W-4606483
                // to avoid 4.1 * 1000000 = 4099999.9999999995
                // Round to the number of fractional digits the input actually had
                // after shifting by the exponent, to dodge float artifacts.
                var decimalSeparatorIndex = numberOnlyPart.indexOf('.');
                var fractionalDigitsNeeded = decimalSeparatorIndex >= 0 ? (numberOnlyPart.length - (decimalSeparatorIndex + exponent + 1)) : 0;
                if (fractionalDigitsNeeded < 0 || fractionalDigitsNeeded > 100) {
                    fractionalDigitsNeeded = 0;
                }
                return parseFloat(((value * Math.pow(10, exponent)).toFixed(fractionalDigitsNeeded)));
            } else {
                return value;
            }
        },
        // True when the value is already a primitive number (per $A.util).
        isNumber: function (number) {
            return $A.util.isNumber(number);
        },
        /**
         * True when the string looks like a (possibly partial) formatted number
         * in the current locale — used for as-you-type validation.
         */
        isFormattedNumber: function (string) {
            var decimalSeparator = $A.get("$Locale.decimal");
            var groupingSeparator = $A.get("$Locale.grouping");
            var const1 = '(?!(K|B|M|T))';
            // This regexp math with any formatted number or any possible formatted number
            // Match with :
            // never start with letter(k,b,m,t,(decimalSeparator))
            // everything that start with ( space* (+|-) spaces* )
            // any repeat of #{1}(groupingSeparator)#{0,n}
            // follow decimalSeparator #{0,maxFractionDigits} (not required)
            // ended by any shortcut (K|B|M|T)
            // it not case sensitive
            var regString = '^' + const1 + '((\\s*(\\+|\\-)?\\s*)' + const1 + ')?' +
                '(\\d*(\\' + groupingSeparator + '\\d*)*)*' +
                '(\\' + decimalSeparator + '\\d*)?' +
                '(K|B|M|T)?$';
            var reg = new RegExp(regString, 'i');
            return reg.test(string);
        }
    };
}
| {
"content_hash": "cc69432741cd051b8b5fba2e6fe1515e",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 150,
"avg_line_length": 40.67768595041322,
"alnum_prop": 0.4983746444534742,
"repo_name": "madmax983/aura",
"id": "322c50c0a99f771feefa10e8e7ce665f16e2d266",
"size": "5533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aura-components/src/main/components/ui/inputNumberLibrary/number.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "782982"
},
{
"name": "GAP",
"bytes": "10087"
},
{
"name": "HTML",
"bytes": "3296233"
},
{
"name": "Java",
"bytes": "9575906"
},
{
"name": "JavaScript",
"bytes": "26838648"
},
{
"name": "PHP",
"bytes": "3345441"
},
{
"name": "Python",
"bytes": "9744"
},
{
"name": "Shell",
"bytes": "20356"
},
{
"name": "XSLT",
"bytes": "1579"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>AdminLTE 2 | General Form Elements</title>
<!-- Tell the browser to be responsive to screen width -->
<meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport">
<!-- Bootstrap 3.3.6 -->
<link rel="stylesheet" href="../../bootstrap/css/bootstrap.min.css">
<!-- Font Awesome -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.5.0/css/font-awesome.min.css">
<!-- Ionicons -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/ionicons/2.0.1/css/ionicons.min.css">
<!-- Theme style -->
<link rel="stylesheet" href="../../dist/css/AdminLTE.min.css">
<!-- AdminLTE Skins. Choose a skin from the css/skins
folder instead of downloading all of them to reduce the load. -->
<link rel="stylesheet" href="../../dist/css/skins/_all-skins.min.css">
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body class="hold-transition skin-blue sidebar-mini">
<div class="wrapper">
<header class="main-header">
<!-- Logo -->
<a href="../../index2.html" class="logo">
<!-- mini logo for sidebar mini 50x50 pixels -->
<span class="logo-mini"><b>A</b>LT</span>
<!-- logo for regular state and mobile devices -->
<span class="logo-lg"><b>Admin</b>LTE</span>
</a>
<!-- Header Navbar: style can be found in header.less -->
<nav class="navbar navbar-static-top">
<!-- Sidebar toggle button-->
<a href="#" class="sidebar-toggle" data-toggle="offcanvas" role="button">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</a>
<div class="navbar-custom-menu">
<ul class="nav navbar-nav">
<!-- Messages: style can be found in dropdown.less-->
<li class="dropdown messages-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-envelope-o"></i>
<span class="label label-success">4</span>
</a>
<ul class="dropdown-menu">
<li class="header">You have 4 messages</li>
<li>
<!-- inner menu: contains the actual data -->
<ul class="menu">
<li><!-- start message -->
<a href="#">
<div class="pull-left">
<img src="../../dist/img/user2-160x160.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Support Team
<small><i class="fa fa-clock-o"></i> 5 mins</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<!-- end message -->
<li>
<a href="#">
<div class="pull-left">
<img src="../../dist/img/user3-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
AdminLTE Design Team
<small><i class="fa fa-clock-o"></i> 2 hours</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<li>
<a href="#">
<div class="pull-left">
<img src="../../dist/img/user4-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Developers
<small><i class="fa fa-clock-o"></i> Today</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<li>
<a href="#">
<div class="pull-left">
<img src="../../dist/img/user3-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Sales Department
<small><i class="fa fa-clock-o"></i> Yesterday</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<li>
<a href="#">
<div class="pull-left">
<img src="../../dist/img/user4-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Reviewers
<small><i class="fa fa-clock-o"></i> 2 days</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
</ul>
</li>
<li class="footer"><a href="#">See All Messages</a></li>
</ul>
</li>
<!-- Notifications: style can be found in dropdown.less -->
<li class="dropdown notifications-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-bell-o"></i>
<span class="label label-warning">10</span>
</a>
<ul class="dropdown-menu">
<li class="header">You have 10 notifications</li>
<li>
<!-- inner menu: contains the actual data -->
<ul class="menu">
<li>
<a href="#">
<i class="fa fa-users text-aqua"></i> 5 new members joined today
</a>
</li>
<li>
<a href="#">
<i class="fa fa-warning text-yellow"></i> Very long description here that may not fit into the
page and may cause design problems
</a>
</li>
<li>
<a href="#">
<i class="fa fa-users text-red"></i> 5 new members joined
</a>
</li>
<li>
<a href="#">
<i class="fa fa-shopping-cart text-green"></i> 25 sales made
</a>
</li>
<li>
<a href="#">
<i class="fa fa-user text-red"></i> You changed your username
</a>
</li>
</ul>
</li>
<li class="footer"><a href="#">View all</a></li>
</ul>
</li>
<!-- Tasks: style can be found in dropdown.less -->
<li class="dropdown tasks-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-flag-o"></i>
<span class="label label-danger">9</span>
</a>
<ul class="dropdown-menu">
<li class="header">You have 9 tasks</li>
<li>
<!-- inner menu: contains the actual data -->
<ul class="menu">
<li><!-- Task item -->
<a href="#">
<h3>
Design some buttons
<small class="pull-right">20%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-aqua" style="width: 20%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">20% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
<li><!-- Task item -->
<a href="#">
<h3>
Create a nice theme
<small class="pull-right">40%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-green" style="width: 40%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">40% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
<li><!-- Task item -->
<a href="#">
<h3>
Some task I need to do
<small class="pull-right">60%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-red" style="width: 60%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">60% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
<li><!-- Task item -->
<a href="#">
<h3>
Make beautiful transitions
<small class="pull-right">80%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-yellow" style="width: 80%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">80% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
</ul>
</li>
<li class="footer">
<a href="#">View all tasks</a>
</li>
</ul>
</li>
<!-- User Account: style can be found in dropdown.less -->
<li class="dropdown user user-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<img src="../../dist/img/user2-160x160.jpg" class="user-image" alt="User Image">
<span class="hidden-xs">Alexander Pierce</span>
</a>
<ul class="dropdown-menu">
<!-- User image -->
<li class="user-header">
<img src="../../dist/img/user2-160x160.jpg" class="img-circle" alt="User Image">
<p>
Alexander Pierce - Web Developer
<small>Member since Nov. 2012</small>
</p>
</li>
<!-- Menu Body -->
<li class="user-body">
<div class="row">
<div class="col-xs-4 text-center">
<a href="#">Followers</a>
</div>
<div class="col-xs-4 text-center">
<a href="#">Sales</a>
</div>
<div class="col-xs-4 text-center">
<a href="#">Friends</a>
</div>
</div>
<!-- /.row -->
</li>
<!-- Menu Footer-->
<li class="user-footer">
<div class="pull-left">
<a href="#" class="btn btn-default btn-flat">Profile</a>
</div>
<div class="pull-right">
<a href="#" class="btn btn-default btn-flat">Sign out</a>
</div>
</li>
</ul>
</li>
<!-- Control Sidebar Toggle Button -->
<li>
<a href="#" data-toggle="control-sidebar"><i class="fa fa-gears"></i></a>
</li>
</ul>
</div>
</nav>
</header>
<!-- Left side column. contains the logo and sidebar -->
<aside class="main-sidebar">
<!-- sidebar: style can be found in sidebar.less -->
<section class="sidebar">
<!-- Sidebar user panel -->
<div class="user-panel">
<div class="pull-left image">
<img src="../../dist/img/user2-160x160.jpg" class="img-circle" alt="User Image">
</div>
<div class="pull-left info">
<p>Alexander Pierce</p>
<a href="#"><i class="fa fa-circle text-success"></i> Online</a>
</div>
</div>
<!-- search form -->
<form action="#" method="get" class="sidebar-form">
<div class="input-group">
<input type="text" name="q" class="form-control" placeholder="Search...">
<span class="input-group-btn">
<button type="submit" name="search" id="search-btn" class="btn btn-flat"><i class="fa fa-search"></i>
</button>
</span>
</div>
</form>
<!-- /.search form -->
<!-- sidebar menu: : style can be found in sidebar.less -->
<ul class="sidebar-menu">
<li class="header">MAIN NAVIGATION</li>
<li class="treeview">
<a href="#">
<i class="fa fa-dashboard"></i> <span>Dashboard</span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="../../index.html"><i class="fa fa-circle-o"></i> Dashboard v1</a></li>
<li><a href="../../index2.html"><i class="fa fa-circle-o"></i> Dashboard v2</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-files-o"></i>
<span>Layout Options</span>
<span class="pull-right-container">
<span class="label label-primary pull-right">4</span>
</span>
</a>
<ul class="treeview-menu">
<li><a href="../layout/top-nav.html"><i class="fa fa-circle-o"></i> Top Navigation</a></li>
<li><a href="../layout/boxed.html"><i class="fa fa-circle-o"></i> Boxed</a></li>
<li><a href="../layout/fixed.html"><i class="fa fa-circle-o"></i> Fixed</a></li>
<li><a href="../layout/collapsed-sidebar.html"><i class="fa fa-circle-o"></i> Collapsed Sidebar</a></li>
</ul>
</li>
<li>
<a href="../widgets.html">
<i class="fa fa-th"></i> <span>Widgets</span>
<span class="pull-right-container">
<small class="label pull-right bg-green">new</small>
</span>
</a>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pie-chart"></i>
<span>Charts</span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="../charts/chartjs.html"><i class="fa fa-circle-o"></i> ChartJS</a></li>
<li><a href="../charts/morris.html"><i class="fa fa-circle-o"></i> Morris</a></li>
<li><a href="../charts/flot.html"><i class="fa fa-circle-o"></i> Flot</a></li>
<li><a href="../charts/inline.html"><i class="fa fa-circle-o"></i> Inline charts</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-laptop"></i>
<span>UI Elements</span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="../UI/general.html"><i class="fa fa-circle-o"></i> General</a></li>
<li><a href="../UI/icons.html"><i class="fa fa-circle-o"></i> Icons</a></li>
<li><a href="../UI/buttons.html"><i class="fa fa-circle-o"></i> Buttons</a></li>
<li><a href="../UI/sliders.html"><i class="fa fa-circle-o"></i> Sliders</a></li>
<li><a href="../UI/timeline.html"><i class="fa fa-circle-o"></i> Timeline</a></li>
<li><a href="../UI/modals.html"><i class="fa fa-circle-o"></i> Modals</a></li>
</ul>
</li>
<li class="treeview active">
<a href="#">
<i class="fa fa-edit"></i> <span>Forms</span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li class="active"><a href="general.html"><i class="fa fa-circle-o"></i> General Elements</a></li>
<li><a href="advanced.html"><i class="fa fa-circle-o"></i> Advanced Elements</a></li>
<li><a href="editors.html"><i class="fa fa-circle-o"></i> Editors</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-table"></i> <span>Tables</span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="../tables/simple.html"><i class="fa fa-circle-o"></i> Simple tables</a></li>
<li><a href="../tables/data.html"><i class="fa fa-circle-o"></i> Data tables</a></li>
</ul>
</li>
<li>
<a href="../calendar.html">
<i class="fa fa-calendar"></i> <span>Calendar</span>
<span class="pull-right-container">
<small class="label pull-right bg-red">3</small>
<small class="label pull-right bg-blue">17</small>
</span>
</a>
</li>
<li>
<a href="../mailbox/mailbox.html">
<i class="fa fa-envelope"></i> <span>Mailbox</span>
<span class="pull-right-container">
<small class="label pull-right bg-yellow">12</small>
<small class="label pull-right bg-green">16</small>
<small class="label pull-right bg-red">5</small>
</span>
</a>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-folder"></i> <span>Examples</span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="../examples/invoice.html"><i class="fa fa-circle-o"></i> Invoice</a></li>
<li><a href="../examples/profile.html"><i class="fa fa-circle-o"></i> Profile</a></li>
<li><a href="../examples/login.html"><i class="fa fa-circle-o"></i> Login</a></li>
<li><a href="../examples/register.html"><i class="fa fa-circle-o"></i> Register</a></li>
<li><a href="../examples/lockscreen.html"><i class="fa fa-circle-o"></i> Lockscreen</a></li>
<li><a href="../examples/404.html"><i class="fa fa-circle-o"></i> 404 Error</a></li>
<li><a href="../examples/500.html"><i class="fa fa-circle-o"></i> 500 Error</a></li>
<li><a href="../examples/blank.html"><i class="fa fa-circle-o"></i> Blank Page</a></li>
<li><a href="../examples/pace.html"><i class="fa fa-circle-o"></i> Pace Page</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-share"></i> <span>Multilevel</span>
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="#"><i class="fa fa-circle-o"></i> Level One</a></li>
<li>
<a href="#"><i class="fa fa-circle-o"></i> Level One
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="#"><i class="fa fa-circle-o"></i> Level Two</a></li>
<li>
<a href="#"><i class="fa fa-circle-o"></i> Level Two
<span class="pull-right-container">
<i class="fa fa-angle-left pull-right"></i>
</span>
</a>
<ul class="treeview-menu">
<li><a href="#"><i class="fa fa-circle-o"></i> Level Three</a></li>
<li><a href="#"><i class="fa fa-circle-o"></i> Level Three</a></li>
</ul>
</li>
</ul>
</li>
<li><a href="#"><i class="fa fa-circle-o"></i> Level One</a></li>
</ul>
</li>
<li><a href="../../documentation/index.html"><i class="fa fa-book"></i> <span>Documentation</span></a></li>
<li class="header">LABELS</li>
<li><a href="#"><i class="fa fa-circle-o text-red"></i> <span>Important</span></a></li>
<li><a href="#"><i class="fa fa-circle-o text-yellow"></i> <span>Warning</span></a></li>
<li><a href="#"><i class="fa fa-circle-o text-aqua"></i> <span>Information</span></a></li>
</ul>
</section>
<!-- /.sidebar -->
</aside>
<!-- Content Wrapper. Contains page content -->
<div class="content-wrapper">
<!-- Content Header (Page header) -->
<section class="content-header">
<h1>
General Form Elements
<small>Preview</small>
</h1>
<ol class="breadcrumb">
<li><a href="#"><i class="fa fa-dashboard"></i> Home</a></li>
<li><a href="#">Forms</a></li>
<li class="active">General Elements</li>
</ol>
</section>
<!-- Main content -->
<section class="content">
<div class="row">
<!-- left column -->
<div class="col-md-6">
<!-- general form elements -->
<div class="box box-primary">
<div class="box-header with-border">
<h3 class="box-title">Quick Example</h3>
</div>
<!-- /.box-header -->
<!-- form start -->
<form role="form">
<div class="box-body">
<div class="form-group">
<label for="exampleInputEmail1">Email address</label>
<input type="email" class="form-control" id="exampleInputEmail1" placeholder="Enter email">
</div>
<div class="form-group">
<label for="exampleInputPassword1">Password</label>
<input type="password" class="form-control" id="exampleInputPassword1" placeholder="Password">
</div>
<div class="form-group">
<label for="exampleInputFile">File input</label>
<input type="file" id="exampleInputFile">
<p class="help-block">Example block-level help text here.</p>
</div>
<div class="checkbox">
<label>
<input type="checkbox"> Check me out
</label>
</div>
</div>
<!-- /.box-body -->
<div class="box-footer">
<button type="submit" class="btn btn-primary">Submit</button>
</div>
</form>
</div>
<!-- /.box -->
<!-- Form Element sizes -->
<div class="box box-success">
<div class="box-header with-border">
<h3 class="box-title">Different Height</h3>
</div>
<div class="box-body">
<input class="form-control input-lg" type="text" placeholder=".input-lg">
<br>
<input class="form-control" type="text" placeholder="Default input">
<br>
<input class="form-control input-sm" type="text" placeholder=".input-sm">
</div>
<!-- /.box-body -->
</div>
<!-- /.box -->
<div class="box box-danger">
<div class="box-header with-border">
<h3 class="box-title">Different Width</h3>
</div>
<div class="box-body">
<div class="row">
<div class="col-xs-3">
<input type="text" class="form-control" placeholder=".col-xs-3">
</div>
<div class="col-xs-4">
<input type="text" class="form-control" placeholder=".col-xs-4">
</div>
<div class="col-xs-5">
<input type="text" class="form-control" placeholder=".col-xs-5">
</div>
</div>
</div>
<!-- /.box-body -->
</div>
<!-- /.box -->
<!-- Input addon -->
<div class="box box-info">
<div class="box-header with-border">
<h3 class="box-title">Input Addon</h3>
</div>
<div class="box-body">
<div class="input-group">
<span class="input-group-addon">@</span>
<input type="text" class="form-control" placeholder="Username">
</div>
<br>
<div class="input-group">
<input type="text" class="form-control">
<span class="input-group-addon">.00</span>
</div>
<br>
<div class="input-group">
<span class="input-group-addon">$</span>
<input type="text" class="form-control">
<span class="input-group-addon">.00</span>
</div>
<h4>With icons</h4>
<div class="input-group">
<span class="input-group-addon"><i class="fa fa-envelope"></i></span>
<input type="email" class="form-control" placeholder="Email">
</div>
<br>
<div class="input-group">
<input type="text" class="form-control">
<span class="input-group-addon"><i class="fa fa-check"></i></span>
</div>
<br>
<div class="input-group">
<span class="input-group-addon"><i class="fa fa-dollar"></i></span>
<input type="text" class="form-control">
<span class="input-group-addon"><i class="fa fa-ambulance"></i></span>
</div>
<h4>With checkbox and radio inputs</h4>
<div class="row">
<div class="col-lg-6">
<div class="input-group">
<span class="input-group-addon">
<input type="checkbox">
</span>
<input type="text" class="form-control">
</div>
<!-- /input-group -->
</div>
<!-- /.col-lg-6 -->
<div class="col-lg-6">
<div class="input-group">
<span class="input-group-addon">
<input type="radio">
</span>
<input type="text" class="form-control">
</div>
<!-- /input-group -->
</div>
<!-- /.col-lg-6 -->
</div>
<!-- /.row -->
<h4>With buttons</h4>
<p class="margin">Large: <code>.input-group.input-group-lg</code></p>
<div class="input-group input-group-lg">
<div class="input-group-btn">
<button type="button" class="btn btn-warning dropdown-toggle" data-toggle="dropdown">Action
<span class="fa fa-caret-down"></span></button>
<ul class="dropdown-menu">
<li><a href="#">Action</a></li>
<li><a href="#">Another action</a></li>
<li><a href="#">Something else here</a></li>
<li class="divider"></li>
<li><a href="#">Separated link</a></li>
</ul>
</div>
<!-- /btn-group -->
<input type="text" class="form-control">
</div>
<!-- /input-group -->
<p class="margin">Normal</p>
<div class="input-group">
<div class="input-group-btn">
<button type="button" class="btn btn-danger">Action</button>
</div>
<!-- /btn-group -->
<input type="text" class="form-control">
</div>
<!-- /input-group -->
<p class="margin">Small <code>.input-group.input-group-sm</code></p>
<div class="input-group input-group-sm">
<input type="text" class="form-control">
<span class="input-group-btn">
<button type="button" class="btn btn-info btn-flat">Go!</button>
</span>
</div>
<!-- /input-group -->
</div>
<!-- /.box-body -->
</div>
<!-- /.box -->
</div>
<!--/.col (left) -->
<!-- right column -->
<div class="col-md-6">
<!-- Horizontal Form -->
<div class="box box-info">
<div class="box-header with-border">
<h3 class="box-title">Horizontal Form</h3>
</div>
<!-- /.box-header -->
<!-- form start -->
<form class="form-horizontal">
<div class="box-body">
<div class="form-group">
<label for="inputEmail3" class="col-sm-2 control-label">Email</label>
<div class="col-sm-10">
<input type="email" class="form-control" id="inputEmail3" placeholder="Email">
</div>
</div>
<div class="form-group">
<label for="inputPassword3" class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
<input type="password" class="form-control" id="inputPassword3" placeholder="Password">
</div>
</div>
<div class="form-group">
<div class="col-sm-offset-2 col-sm-10">
<div class="checkbox">
<label>
<input type="checkbox"> Remember me
</label>
</div>
</div>
</div>
</div>
<!-- /.box-body -->
<div class="box-footer">
<button type="submit" class="btn btn-default">Cancel</button>
<button type="submit" class="btn btn-info pull-right">Sign in</button>
</div>
<!-- /.box-footer -->
</form>
</div>
<!-- /.box -->
<!-- general form elements disabled -->
<div class="box box-warning">
<div class="box-header with-border">
<h3 class="box-title">General Elements</h3>
</div>
<!-- /.box-header -->
<div class="box-body">
<form role="form">
<!-- text input -->
<div class="form-group">
<label>Text</label>
<input type="text" class="form-control" placeholder="Enter ...">
</div>
<div class="form-group">
<label>Text Disabled</label>
<input type="text" class="form-control" placeholder="Enter ..." disabled>
</div>
<!-- textarea -->
<div class="form-group">
<label>Textarea</label>
<textarea class="form-control" rows="3" placeholder="Enter ..."></textarea>
</div>
<div class="form-group">
<label>Textarea Disabled</label>
<textarea class="form-control" rows="3" placeholder="Enter ..." disabled></textarea>
</div>
<!-- input states -->
<div class="form-group has-success">
<label class="control-label" for="inputSuccess"><i class="fa fa-check"></i> Input with success</label>
<input type="text" class="form-control" id="inputSuccess" placeholder="Enter ...">
<span class="help-block">Help block with success</span>
</div>
<div class="form-group has-warning">
<label class="control-label" for="inputWarning"><i class="fa fa-bell-o"></i> Input with
warning</label>
<input type="text" class="form-control" id="inputWarning" placeholder="Enter ...">
<span class="help-block">Help block with warning</span>
</div>
<div class="form-group has-error">
<label class="control-label" for="inputError"><i class="fa fa-times-circle-o"></i> Input with
error</label>
<input type="text" class="form-control" id="inputError" placeholder="Enter ...">
<span class="help-block">Help block with error</span>
</div>
<!-- checkbox -->
<div class="form-group">
<div class="checkbox">
<label>
<input type="checkbox">
Checkbox 1
</label>
</div>
<div class="checkbox">
<label>
<input type="checkbox">
Checkbox 2
</label>
</div>
<div class="checkbox">
<label>
<input type="checkbox" disabled>
Checkbox disabled
</label>
</div>
</div>
<!-- radio -->
<div class="form-group">
<div class="radio">
<label>
<input type="radio" name="optionsRadios" id="optionsRadios1" value="option1" checked>
Option one is this and that—be sure to include why it's great
</label>
</div>
<div class="radio">
<label>
<input type="radio" name="optionsRadios" id="optionsRadios2" value="option2">
Option two can be something else and selecting it will deselect option one
</label>
</div>
<div class="radio">
<label>
<input type="radio" name="optionsRadios" id="optionsRadios3" value="option3" disabled>
Option three is disabled
</label>
</div>
</div>
<!-- select -->
<div class="form-group">
<label>Select</label>
<select class="form-control">
<option>option 1</option>
<option>option 2</option>
<option>option 3</option>
<option>option 4</option>
<option>option 5</option>
</select>
</div>
<div class="form-group">
<label>Select Disabled</label>
<select class="form-control" disabled>
<option>option 1</option>
<option>option 2</option>
<option>option 3</option>
<option>option 4</option>
<option>option 5</option>
</select>
</div>
<!-- Select multiple-->
<div class="form-group">
<label>Select Multiple</label>
<select multiple class="form-control">
<option>option 1</option>
<option>option 2</option>
<option>option 3</option>
<option>option 4</option>
<option>option 5</option>
</select>
</div>
<div class="form-group">
<label>Select Multiple Disabled</label>
<select multiple class="form-control" disabled>
<option>option 1</option>
<option>option 2</option>
<option>option 3</option>
<option>option 4</option>
<option>option 5</option>
</select>
</div>
</form>
</div>
<!-- /.box-body -->
</div>
<!-- /.box -->
</div>
<!--/.col (right) -->
</div>
<!-- /.row -->
</section>
<!-- /.content -->
</div>
<!-- /.content-wrapper -->
<footer class="main-footer">
<div class="pull-right hidden-xs">
<b>Version</b> 2.3.8
</div>
<strong>Copyright © 2014-2016 <a href="http://almsaeedstudio.com">Almsaeed Studio</a>.</strong> All rights
reserved.
</footer>
<!-- Control Sidebar -->
<aside class="control-sidebar control-sidebar-dark">
<!-- Create the tabs -->
<ul class="nav nav-tabs nav-justified control-sidebar-tabs">
<li><a href="#control-sidebar-home-tab" data-toggle="tab"><i class="fa fa-home"></i></a></li>
<li><a href="#control-sidebar-settings-tab" data-toggle="tab"><i class="fa fa-gears"></i></a></li>
</ul>
<!-- Tab panes -->
<div class="tab-content">
<!-- Home tab content -->
<div class="tab-pane" id="control-sidebar-home-tab">
<h3 class="control-sidebar-heading">Recent Activity</h3>
<ul class="control-sidebar-menu">
<li>
<a href="javascript:void(0)">
<i class="menu-icon fa fa-birthday-cake bg-red"></i>
<div class="menu-info">
<h4 class="control-sidebar-subheading">Langdon's Birthday</h4>
<p>Will be 23 on April 24th</p>
</div>
</a>
</li>
<li>
<a href="javascript:void(0)">
<i class="menu-icon fa fa-user bg-yellow"></i>
<div class="menu-info">
<h4 class="control-sidebar-subheading">Frodo Updated His Profile</h4>
<p>New phone +1(800)555-1234</p>
</div>
</a>
</li>
<li>
<a href="javascript:void(0)">
<i class="menu-icon fa fa-envelope-o bg-light-blue"></i>
<div class="menu-info">
<h4 class="control-sidebar-subheading">Nora Joined Mailing List</h4>
<p>nora@example.com</p>
</div>
</a>
</li>
<li>
<a href="javascript:void(0)">
<i class="menu-icon fa fa-file-code-o bg-green"></i>
<div class="menu-info">
<h4 class="control-sidebar-subheading">Cron Job 254 Executed</h4>
<p>Execution time 5 seconds</p>
</div>
</a>
</li>
</ul>
<!-- /.control-sidebar-menu -->
<h3 class="control-sidebar-heading">Tasks Progress</h3>
<ul class="control-sidebar-menu">
<li>
<a href="javascript:void(0)">
<h4 class="control-sidebar-subheading">
Custom Template Design
<span class="label label-danger pull-right">70%</span>
</h4>
<div class="progress progress-xxs">
<div class="progress-bar progress-bar-danger" style="width: 70%"></div>
</div>
</a>
</li>
<li>
<a href="javascript:void(0)">
<h4 class="control-sidebar-subheading">
Update Resume
<span class="label label-success pull-right">95%</span>
</h4>
<div class="progress progress-xxs">
<div class="progress-bar progress-bar-success" style="width: 95%"></div>
</div>
</a>
</li>
<li>
<a href="javascript:void(0)">
<h4 class="control-sidebar-subheading">
Laravel Integration
<span class="label label-warning pull-right">50%</span>
</h4>
<div class="progress progress-xxs">
<div class="progress-bar progress-bar-warning" style="width: 50%"></div>
</div>
</a>
</li>
<li>
<a href="javascript:void(0)">
<h4 class="control-sidebar-subheading">
Back End Framework
<span class="label label-primary pull-right">68%</span>
</h4>
<div class="progress progress-xxs">
<div class="progress-bar progress-bar-primary" style="width: 68%"></div>
</div>
</a>
</li>
</ul>
<!-- /.control-sidebar-menu -->
</div>
<!-- /.tab-pane -->
<!-- Stats tab content -->
<div class="tab-pane" id="control-sidebar-stats-tab">Stats Tab Content</div>
<!-- /.tab-pane -->
<!-- Settings tab content -->
<div class="tab-pane" id="control-sidebar-settings-tab">
<form method="post">
<h3 class="control-sidebar-heading">General Settings</h3>
<div class="form-group">
<label class="control-sidebar-subheading">
Report panel usage
<input type="checkbox" class="pull-right" checked>
</label>
<p>
Some information about this general settings option
</p>
</div>
<!-- /.form-group -->
<div class="form-group">
<label class="control-sidebar-subheading">
Allow mail redirect
<input type="checkbox" class="pull-right" checked>
</label>
<p>
Other sets of options are available
</p>
</div>
<!-- /.form-group -->
<div class="form-group">
<label class="control-sidebar-subheading">
Expose author name in posts
<input type="checkbox" class="pull-right" checked>
</label>
<p>
Allow the user to show his name in blog posts
</p>
</div>
<!-- /.form-group -->
<h3 class="control-sidebar-heading">Chat Settings</h3>
<div class="form-group">
<label class="control-sidebar-subheading">
Show me as online
<input type="checkbox" class="pull-right" checked>
</label>
</div>
<!-- /.form-group -->
<div class="form-group">
<label class="control-sidebar-subheading">
Turn off notifications
<input type="checkbox" class="pull-right">
</label>
</div>
<!-- /.form-group -->
<div class="form-group">
<label class="control-sidebar-subheading">
Delete chat history
<a href="javascript:void(0)" class="text-red pull-right"><i class="fa fa-trash-o"></i></a>
</label>
</div>
<!-- /.form-group -->
</form>
</div>
<!-- /.tab-pane -->
</div>
</aside>
<!-- /.control-sidebar -->
<!-- Add the sidebar's background. This div must be placed
immediately after the control sidebar -->
<div class="control-sidebar-bg"></div>
</div>
<!-- ./wrapper -->
<!-- jQuery 2.2.3 -->
<script src="../../plugins/jQuery/jquery-2.2.3.min.js"></script>
<!-- Bootstrap 3.3.6 -->
<script src="../../bootstrap/js/bootstrap.min.js"></script>
<!-- FastClick -->
<script src="../../plugins/fastclick/fastclick.js"></script>
<!-- AdminLTE App -->
<script src="../../dist/js/app.min.js"></script>
<!-- AdminLTE for demo purposes -->
<script src="../../dist/js/demo.js"></script>
</body>
</html>
| {
"content_hash": "ba606e4cf1953b5af8342044c29aeeee",
"timestamp": "",
"source": "github",
"line_count": 1120,
"max_line_length": 165,
"avg_line_length": 40.34464285714286,
"alnum_prop": 0.44223874651440714,
"repo_name": "php2017php/project",
"id": "e4369519193d1de1a72b290faf7a2d8cdde3de77",
"size": "45186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/admin/adminlte/pages/forms/general.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "553"
},
{
"name": "CSS",
"bytes": "106100"
},
{
"name": "HTML",
"bytes": "3695584"
},
{
"name": "PHP",
"bytes": "153226"
},
{
"name": "Vue",
"bytes": "563"
}
],
"symlink_target": ""
} |
Procedurally Generated Matrices (PGM) data from the paper Measuring Abstract
Reasoning in Neural Networks, Barrett, Hill, Santoro et al. 2018. The goal is to
infer the correct answer from the context panels based on abstract reasoning.
To use this data set, please download all the *.tar.gz files from the data set
page and place them in ~/tensorflow_datasets/abstract_reasoning/.
$R$ denotes the set of relation types (progression, XOR, OR, AND, consistent
union), $O$ denotes the object types (shape, line), and $A$ denotes the
attribute types (size, colour, position, number). The structure of a matrix,
$S$, is the set of triples $S={[r, o, a]}$ that determine the challenge posed by
a particular matrix.
| {
"content_hash": "a6128606183655bfe0566cd92f948df3",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 80,
"avg_line_length": 59.25,
"alnum_prop": 0.7637130801687764,
"repo_name": "tensorflow/datasets",
"id": "7ed4a5264a45597edb4c24db2c65bb79d44a9bfc",
"size": "711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow_datasets/datasets/abstract_reasoning/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "728"
},
{
"name": "JavaScript",
"bytes": "13369"
},
{
"name": "NewLisp",
"bytes": "13940"
},
{
"name": "Perl",
"bytes": "520"
},
{
"name": "Python",
"bytes": "5398856"
},
{
"name": "Roff",
"bytes": "22095"
},
{
"name": "Ruby",
"bytes": "25669"
},
{
"name": "Shell",
"bytes": "3895"
},
{
"name": "Smalltalk",
"bytes": "20604"
},
{
"name": "TeX",
"bytes": "759"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.spotify.missinglink.tests</groupId>
<artifactId>static-method-became-instance</artifactId>
<version>1-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>com.spotify.missinglink.tests</groupId>
<artifactId>a</artifactId>
<version>2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.spotify.missinglink.tests</groupId>
<artifactId>b</artifactId>
<version>1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>@project.groupId@</groupId>
<artifactId>@project.artifactId@</artifactId>
<version>@project.version@</version>
<executions>
<execution>
<goals><goal>check</goal></goals>
<phase>process-classes</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
| {
"content_hash": "081b9688797c30f5c3d9723e44acd082",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 204,
"avg_line_length": 31.372093023255815,
"alnum_prop": 0.6360266864343959,
"repo_name": "pettermahlen/missinglink",
"id": "7cc0703db1f7cab50514796bc2b6c0f4f28d231a",
"size": "1349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "maven-plugin/src/it/static-method-became-instance/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "157614"
},
{
"name": "Shell",
"bytes": "1026"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_26) on Mon May 07 13:00:01 PDT 2012 -->
<TITLE>
Uses of Class org.apache.hadoop.mapred.KeyValueLineRecordReader (Hadoop 0.20.2-cdh3u4 API)
</TITLE>
<META NAME="date" CONTENT="2012-05-07">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.mapred.KeyValueLineRecordReader (Hadoop 0.20.2-cdh3u4 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/mapred/KeyValueLineRecordReader.html" title="class in org.apache.hadoop.mapred"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/mapred//class-useKeyValueLineRecordReader.html" target="_top"><B>FRAMES</B></A>
<A HREF="KeyValueLineRecordReader.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.mapred.KeyValueLineRecordReader</B></H2>
</CENTER>
No usage of org.apache.hadoop.mapred.KeyValueLineRecordReader
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/mapred/KeyValueLineRecordReader.html" title="class in org.apache.hadoop.mapred"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/mapred//class-useKeyValueLineRecordReader.html" target="_top"><B>FRAMES</B></A>
<A HREF="KeyValueLineRecordReader.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2009 The Apache Software Foundation
</BODY>
</HTML>
| {
"content_hash": "5dc25939a75ceca3dc3e5ae480fa2c67",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 230,
"avg_line_length": 42.5,
"alnum_prop": 0.6240196078431373,
"repo_name": "Shmuma/hadoop",
"id": "a3df67887ee4189e115e93cb9be2101f89e79815",
"size": "6120",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/api/org/apache/hadoop/mapred/class-use/KeyValueLineRecordReader.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "409571"
},
{
"name": "C++",
"bytes": "403118"
},
{
"name": "CSS",
"bytes": "106092"
},
{
"name": "Java",
"bytes": "15723216"
},
{
"name": "JavaScript",
"bytes": "112012"
},
{
"name": "Objective-C",
"bytes": "119767"
},
{
"name": "PHP",
"bytes": "152555"
},
{
"name": "Perl",
"bytes": "149888"
},
{
"name": "Python",
"bytes": "1217631"
},
{
"name": "Ruby",
"bytes": "28485"
},
{
"name": "Shell",
"bytes": "1438025"
},
{
"name": "Smalltalk",
"bytes": "56562"
},
{
"name": "XSLT",
"bytes": "235231"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Hist. Class. Discom. Eur. (Paris) 107 (1907)
#### Original name
Sclerotinia hirtella Boud., 1907
### Remarks
null | {
"content_hash": "acb8ef53d403e5178006cd40a021ba3c",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 44,
"avg_line_length": 15.461538461538462,
"alnum_prop": 0.7014925373134329,
"repo_name": "mdoering/backbone",
"id": "2c7c314546f55bd8f9bce3837293d57488da2960",
"size": "257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Leotiomycetes/Helotiales/Sclerotiniaceae/Ciborinia/Ciborinia hirtella/ Syn. Sclerotinia hirtella/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace Keratine\Form\DataTransformer;
use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\Common\Persistence\ObjectManager;
use Symfony\Component\Form\DataTransformerInterface;
use Symfony\Component\Form\Exception\TransformationFailedException;
use Symfony\Component\PropertyAccess\PropertyAccess;
class StringToArrayCollectionTransformer implements DataTransformerInterface
{
/**
* @var ObjectManager
*/
private $om;
/**
* @var string
*/
private $entityClass;
/**
* @var string
*/
private $property;
/**
* @param ObjectManager $om
* @param string $entityClass
* @param string $property
*/
public function __construct(ObjectManager $om, $entityClass, $property)
{
$this->om = $om;
$this->entityClass = $entityClass;
$this->property = $property;
}
/**
* @param ArrayCollection $collection
* @return string
*/
public function transform($collection)
{
$values = array();
foreach ($collection as $entity) {
$values[] = $entity->getTitle();
}
return implode(',', $values);
}
/**
* @param string $string
* @return ArrayCollection
*/
public function reverseTransform($string)
{
$values = explode(',', $string);
$collection = new ArrayCollection();
foreach ($values as $value) {
$entity = $this->om->getRepository($this->entityClass)->findOneBy(array($this->property => $value));
if (!$entity) {
$entity = new $this->entityClass;
$accessor = PropertyAccess::createPropertyAccessor();
$accessor->setValue($entity, $this->property, $value);
$this->om->persist($entity);
}
$collection->add($entity);
}
return $collection;
}
} | {
"content_hash": "bba243512873df7ae1b553145f868cc3",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 112,
"avg_line_length": 24.58974358974359,
"alnum_prop": 0.5891553701772679,
"repo_name": "Cellules/keratine",
"id": "01cb4e7722dc4c7ea7044698d9d7bf0eca697d8f",
"size": "1918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Keratine/Form/DataTransformer/StringToArrayCollectionTransformer.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "134412"
}
],
"symlink_target": ""
} |
// Ember Data serializer for Echonest "track" responses.
// The base EchonestSerializer unwraps API payloads using `modelKey`,
// so this subclass only needs to name the response root key.
import EchonestSerializer from 'ember-data-echonest/serializers/echonest';

export default EchonestSerializer.extend({
  modelKey: 'track'
});
| {
"content_hash": "b53efb0130261c7bfd523eb26d46ee38",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 74,
"avg_line_length": 29,
"alnum_prop": 0.7862068965517242,
"repo_name": "elwayman02/ember-data-echonest",
"id": "dc6a76ab28f4176b3183f7eb53dc5e3ef42a3e41",
"size": "145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "addon/serializers/echonest-track.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1038"
},
{
"name": "HTML",
"bytes": "18267"
},
{
"name": "JavaScript",
"bytes": "65871"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8" ?>
<hibernate-mapping xmlns="urn:nhibernate-mapping-2.2">
<class name="Zh.DAL.Define.Entities.Display_HomePriceChartType, Zh.DAL.Define" table="Display_HomePriceChartType">
<id name="ID" type="Guid" unsaved-value="00000000-0000-0000-0000-000000000000">
<column name="ID" length="16" sql-type="uniqueidentifier" not-null="true" unique="true"/>
<generator class="assigned" />
</id>
<!-- 类型名称 -->
<property name="Name" type="String" >
<column name="Name" length="64" sql-type="nvarchar" not-null="true"/>
</property>
<!-- 排序 -->
<property name="Sort" type="Int32" >
<column name="Sort" length="4" sql-type="int" not-null="true"/>
</property>
<bag name="Display_HomePriceChart" inverse="true" lazy="true" cascade="all">
<key column="TypeId"/>
<one-to-many class="Zh.DAL.Define.Entities.Display_HomePriceChart, Zh.DAL.Define"/>
</bag>
<bag name="Display_HomePriceChartDistributorAdvert" inverse="true" lazy="true" cascade="all">
<key column="TypeId"/>
<one-to-many class="Zh.DAL.Define.Entities.Display_HomePriceChartDistributorAdvert, Zh.DAL.Define"/>
</bag>
</class>
</hibernate-mapping>
| {
"content_hash": "c9899dbdc072ee9c4ff736cb3e0e61f9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 115,
"avg_line_length": 43.75,
"alnum_prop": 0.6587755102040816,
"repo_name": "Caspar12/Csharp",
"id": "6a1c7d6560eb8cabf2796138218705133b0de415",
"size": "1239",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Zh.DAL.Define/Config/Zh/DAL/hbm/Display_HomePriceChartType.hbm.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "85"
},
{
"name": "C#",
"bytes": "2496338"
},
{
"name": "CSS",
"bytes": "4500"
},
{
"name": "HTML",
"bytes": "8844"
},
{
"name": "JavaScript",
"bytes": "21236"
},
{
"name": "PHP",
"bytes": "59313"
},
{
"name": "Pascal",
"bytes": "133111"
},
{
"name": "PowerShell",
"bytes": "139820"
},
{
"name": "Puppet",
"bytes": "972"
}
],
"symlink_target": ""
} |
var util = require('util');
var EventEmitter = require('events').EventEmitter;
function FakeSocket () {
EventEmitter.call(this);
this.end = function () {
var self = this;
setImmediate(function () {
self.emit('end');
});
};
this.close = function () {};
this.setKeepAlive = function () {};
this.destroy = function () {};
}
util.inherits(FakeSocket, EventEmitter);
module.exports = FakeSocket;
| {
"content_hash": "1250fd169496ef7dc0107e9a85f7d2cc",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 50,
"avg_line_length": 21.25,
"alnum_prop": 0.6352941176470588,
"repo_name": "february29/Learning",
"id": "77d4aca62065de42e61e663e1e33189fc9f8c379",
"size": "425",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "web/vue/AccountBook-Express/node_modules/kafka-node/test/mocks/mockSocket.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "31706"
},
{
"name": "C",
"bytes": "22635"
},
{
"name": "CSS",
"bytes": "85895"
},
{
"name": "HTML",
"bytes": "3008010"
},
{
"name": "Java",
"bytes": "418616"
},
{
"name": "JavaScript",
"bytes": "887230"
},
{
"name": "Objective-C",
"bytes": "59796"
},
{
"name": "Ruby",
"bytes": "1021"
},
{
"name": "Shell",
"bytes": "47542"
},
{
"name": "Swift",
"bytes": "271422"
},
{
"name": "Vue",
"bytes": "752046"
},
{
"name": "XSLT",
"bytes": "4740"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "51d628a6af15dffb71ef47470b1afea8",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "b918e11ac07560ad0c56f1793b14ac3bef5ff4c4",
"size": "187",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Aizoaceae/Mesembryanthemum/Mesembryanthemum brachyandrum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace DvsaCommon\Model;
use DvsaCommon\Enum\AuthorisationForTestingMotStatusCode;
class AuthorisationForTestingMotStatus
{
    /**
     * Returns the authorisation-status codes for which Test Quality
     * Information (TQI) may be asserted on a person profile.
     *
     * @return string[] AuthorisationForTestingMotStatusCode values
     */
    public static function getPossibleStatusesForTqiAssertion()
    {
        return [
            AuthorisationForTestingMotStatusCode::QUALIFIED,
            AuthorisationForTestingMotStatusCode::DEMO_TEST_NEEDED,
            AuthorisationForTestingMotStatusCode::INITIAL_TRAINING_NEEDED,
            AuthorisationForTestingMotStatusCode::SUSPENDED,
        ];
    }
}
"content_hash": "19abd224a73feb5e7c04e70c2e3f4997",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 90,
"avg_line_length": 28.523809523809526,
"alnum_prop": 0.7395659432387313,
"repo_name": "dvsa/mot",
"id": "982a58a44ce5249fd8317037f8b1feeb8ab77848",
"size": "599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mot-common-web-module/src/DvsaCommon/Model/AuthorisationForTestingMotStatus.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "604618"
},
{
"name": "Dockerfile",
"bytes": "2693"
},
{
"name": "Gherkin",
"bytes": "189981"
},
{
"name": "HTML",
"bytes": "1579702"
},
{
"name": "Java",
"bytes": "1631717"
},
{
"name": "JavaScript",
"bytes": "156823"
},
{
"name": "Makefile",
"bytes": "2877"
},
{
"name": "PHP",
"bytes": "20142004"
},
{
"name": "PLpgSQL",
"bytes": "61098"
},
{
"name": "Python",
"bytes": "3354"
},
{
"name": "Ruby",
"bytes": "72"
},
{
"name": "SQLPL",
"bytes": "1739266"
},
{
"name": "Shell",
"bytes": "203709"
}
],
"symlink_target": ""
} |
package resources
import "github.com/awslabs/goformation/cloudformation/policies"
// AWSDMSEndpoint_DynamoDbSettings AWS CloudFormation Resource (AWS::DMS::Endpoint.DynamoDbSettings)
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-dynamodbsettings.html
type AWSDMSEndpoint_DynamoDbSettings struct {

	// ServiceAccessRoleArn AWS CloudFormation Property
	// Required: false
	// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-dynamodbsettings.html#cfn-dms-endpoint-dynamodbsettings-serviceaccessrolearn
	ServiceAccessRoleArn string `json:"ServiceAccessRoleArn,omitempty"`

	// _deletionPolicy represents a CloudFormation DeletionPolicy
	// (unexported so it is not serialized into the template JSON).
	_deletionPolicy policies.DeletionPolicy

	// _dependsOn stores the logical ID of the resources to be created before this resource
	_dependsOn []string

	// _metadata stores structured data associated with this resource
	_metadata map[string]interface{}
}

// AWSCloudFormationType returns the AWS CloudFormation resource type
func (r *AWSDMSEndpoint_DynamoDbSettings) AWSCloudFormationType() string {
	return "AWS::DMS::Endpoint.DynamoDbSettings"
}

// DependsOn returns a slice of logical ID names this resource depends on.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-dependson.html
func (r *AWSDMSEndpoint_DynamoDbSettings) DependsOn() []string {
	return r._dependsOn
}

// SetDependsOn specify that the creation of this resource follows another.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-dependson.html
func (r *AWSDMSEndpoint_DynamoDbSettings) SetDependsOn(dependencies []string) {
	r._dependsOn = dependencies
}

// Metadata returns the metadata associated with this resource.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-metadata.html
func (r *AWSDMSEndpoint_DynamoDbSettings) Metadata() map[string]interface{} {
	return r._metadata
}

// SetMetadata enables you to associate structured data with this resource.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-metadata.html
func (r *AWSDMSEndpoint_DynamoDbSettings) SetMetadata(metadata map[string]interface{}) {
	r._metadata = metadata
}

// SetDeletionPolicy applies an AWS CloudFormation DeletionPolicy to this resource
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-deletionpolicy.html
func (r *AWSDMSEndpoint_DynamoDbSettings) SetDeletionPolicy(policy policies.DeletionPolicy) {
	r._deletionPolicy = policy
}
| {
"content_hash": "e2edbe7bb6fa2eeb5d8a2b2913f2d82b",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 175,
"avg_line_length": 45.68421052631579,
"alnum_prop": 0.8087557603686636,
"repo_name": "PaulMaddox/goformation",
"id": "9b995832f442c9cfafa3817179fcab1f018561f7",
"size": "2604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudformation/resources/aws-dms-endpoint_dynamodbsettings.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "8167807"
},
{
"name": "JavaScript",
"bytes": "6477"
},
{
"name": "Python",
"bytes": "2540"
},
{
"name": "Shell",
"bytes": "3595"
}
],
"symlink_target": ""
} |
package org.assertj.core.description;
import static java.util.UUID.randomUUID;
import static junit.framework.Assert.assertEquals;
import static org.junit.rules.ExpectedException.none;
import org.assertj.core.description.TextDescription;
import org.junit.*;
import org.junit.rules.ExpectedException;
/**
* Tests for <code>{@link TextDescription#TextDescription(String)}</code>.
*
* @author Yvonne Wang
* @author Alex Ruiz
*/
public class TextDescription_constructor_Test {

  @Rule
  public ExpectedException thrown = none();

  // The constructor must store the given value verbatim.
  @Test
  public void should_set_value() {
    String value = randomText();
    TextDescription description = new TextDescription(value);
    assertEquals(value, description.value);
  }

  // A random UUID string guarantees each test run uses a distinct value.
  private static String randomText() {
    return randomUUID().toString();
  }

  // Null values are rejected eagerly with a NullPointerException.
  @Test
  public void should_throw_error_if_value_is_null() {
    thrown.expect(NullPointerException.class);
    new TextDescription(null);
  }
}
| {
"content_hash": "9f45b6e5892a960e1cae67f16e55199a",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 74,
"avg_line_length": 24.333333333333332,
"alnum_prop": 0.7397260273972602,
"repo_name": "yurloc/assertj-core",
"id": "0ee97ea713ccfde7a0559024cb60209e87a59e62",
"size": "1590",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/org/assertj/core/description/TextDescription_constructor_Test.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/ScrollView1"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical" >
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:background="#ffd980"
android:orientation="vertical" >
<!-- <ImageView-->
<com.paradiseoctopus.happysquirrel.helpers.RoundedImageView
android:id="@+id/user_photo"
android:layout_width="@dimen/photo_profile"
android:layout_height="@dimen/photo_profile"
android:layout_gravity="center_horizontal"
android:layout_margin="@dimen/favourite_margin"
android:contentDescription="@string/user_photo" />
<TextView
android:id="@+id/user_name"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:text="Ololo Ololevich"
android:textSize="@dimen/text_headline" />
</LinearLayout>
<View
style="@style/divider_thinner"/>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="ACHIEVEMENTS"
android:layout_marginLeft="@dimen/favourite_margin"
style="@style/subheadline"/>
<View
style="@style/divider"/>
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<ImageView
android:padding="8dip"
android:layout_width="50dp"
android:layout_height="50dp"
android:src="@drawable/ybadge"/>
<TextView
android:textStyle="italic"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginTop="@dimen/favourite_margin"
android:layout_gravity="center_vertical"
android:text="Environmentalist"
android:textSize="@dimen/text_headline_secondary"/>
</LinearLayout>
<View
style="@style/divider_thinner"/>
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<ImageView
android:padding="8dip"
android:layout_width="50dp"
android:layout_height="50dp"
android:src="@drawable/rbadge"/>
<TextView
android:textStyle="italic"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginTop="@dimen/favourite_margin"
android:layout_gravity="center_vertical"
android:text="Party Animal"
android:textSize="@dimen/text_headline_secondary"/>
</LinearLayout>
<View
style="@style/divider_thinner"/>
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<ImageView
android:padding="8dip"
android:layout_width="50dp"
android:layout_height="50dp"
android:src="@drawable/badge"/>
<TextView
android:textStyle="italic"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginTop="@dimen/favourite_margin"
android:layout_gravity="center_vertical"
android:text="Bug Eater"
android:textColor="@color/holo_grey"
android:textSize="@dimen/text_headline_secondary"/>
</LinearLayout>
</LinearLayout>
</ScrollView>
| {
"content_hash": "e197c064032b383ed3e4dc69cf9be396",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 72,
"avg_line_length": 35.36666666666667,
"alnum_prop": 0.5782280867106503,
"repo_name": "cliffroot/2ways-gdg",
"id": "cc537d7cf7ebfd594dae3d58e4a03208c23b3c2c",
"size": "4244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "res/layout/profile_fragment.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
/*
* state.cpp
*
* Created on: Dec 30, 2013
* Author: jonas
*/
#include "state.h"
#include "project.h"
#include "pcInterface.h"
#include <xpcc/architecture.hpp>
using namespace xpcc::stm32;
GPIO__OUTPUT(ledRed, D, 14);
GPIO__OUTPUT(ledBlue, D, 15);
GPIO__OUTPUT(ledOrange, D, 13);
GPIO__OUTPUT(ledGreen, D, 12);
int State::currentState;
// Stop every LED blink timer and force all four status LEDs off.
// Called before entering a new state so only that state's indication shows.
void disableAllLeds(){
	//disable timers
	Timer1::pause();
	Timer6::pause();
	Timer7::pause();

	ledRed::set(false);
	ledBlue::set(false);
	ledOrange::set(false);
	ledGreen::set(false);
}
// Configure the four status-LED GPIOs and the three blink timers, then
// enter the INITIALIZING state.
//
// Timer-to-LED mapping (each timer's update IRQ toggles its LED):
//   timer1 -> red    -> error blink, 300 ms period
//   timer6 -> blue   -> rectangle-scan blink, 1000 ms period
//   timer7 -> orange -> velocity-limit blink, 200 ms period
void State::init(){
	ledRed::setOutput(xpcc::stm32::PUSH_PULL);
	ledBlue::setOutput(xpcc::stm32::PUSH_PULL);
	ledOrange::setOutput(xpcc::stm32::PUSH_PULL);
	ledGreen::setOutput(xpcc::stm32::PUSH_PULL);

	//configure timers:
	//timer1 -> red led -> errors
	Timer1::enable();
	// Fix: use Timer1's own enumerator (the original referenced
	// Timer3::UP_COUNTER, which happened to alias the same value).
	Timer1::setMode(Timer1::UP_COUNTER);
	Timer1::enableInterruptVector(GeneralPurposeTimer::Interrupt::INTERRUPT_UPDATE, true, 15);
	Timer1::enableInterrupt(Timer1::INTERRUPT_UPDATE);
	Timer1::setPeriod(300000); // 300 ms

	//timer6 -> blue led -> executing rectangle scan
	Timer6::enable();
	Timer6::setMode(Timer6::UP_COUNTER);
	Timer6::enableInterruptVector(true, 15);
	Timer6::enableInterrupt(Timer6::INTERRUPT_UPDATE);
	Timer6::setPeriod(1000000); // 1000 ms

	//timer7 -> orange led -> velocity controlled
	Timer7::enable();
	Timer7::setMode(Timer7::UP_COUNTER);
	Timer7::enableInterruptVector(true, 15);
	Timer7::enableInterrupt(Timer7::INTERRUPT_UPDATE);
	Timer7::setPeriod(200000); // 200 ms (comment previously claimed 700 ms)

	State::set(State::INITIALIZING);
}
// Switch the global machine state and drive the status LEDs accordingly.
// All blink timers are paused and LEDs cleared first (disableAllLeds), so
// only the new state's indication is active afterwards.
void State::set(int newState){
	currentState = newState;
	disableAllLeds();
	dout << "going to state " << newState << endl;
	switch(newState){
	case State::INITIALIZING:
		// solid orange + red while the system starts up
		ledOrange::set(true);
		ledRed::set(true);
		break;
	case State::READY:
		// solid green: system idle and ready
		ledGreen::set(true);
		break;
	case State::SCANNING_RECTANGLE:
		// blue LED blinks via timer6 while a rectangle scan runs
		Timer6::start();
		break;
	case State::MOTOR1_FAULT:
	case State::MOTOR2_FAULT:
	case State::PC_INIT_FAIL:
		// all fault conditions blink the red LED via timer1
		Timer1::start();
		break;
	}
}
// Set or clear a secondary (overlay) condition without leaving the main
// state. Only MOTOR_LIMITED is handled: while set, the orange LED blinks
// via timer7; clearing pauses the timer and turns the LED off.
void State::setSecondaryState(int state, bool set){
	//dout << "motor limit " << set << endl;
	switch(state){
	case State::MOTOR_LIMITED:
		if(set){
			Timer7::start();
		}else{
			Timer7::pause();
			setOrange(false);
		}
		break;
	}
}
// Direct LED control helpers; bypass the state machine.
void State::setRed(bool on){
	ledRed::set(on);
}
void State::setBlue(bool on){
	ledBlue::set(on);
}
void State::setOrange(bool on){
	ledOrange::set(on);
}
void State::setGreen(bool on){
	ledGreen::set(on);
}

// Timer update interrupt handlers: each acknowledges its update flag and
// toggles the LED mapped to that timer, producing the blink patterns.
extern "C" void
TIM1_UP_TIM10_IRQHandler(void){
	Timer1::resetInterruptFlags(Timer1::FLAG_UPDATE);
	ledRed::toggle();
}

extern "C" void
TIM6_DAC_IRQHandler(void){
	Timer6::resetInterruptFlags(Timer6::FLAG_UPDATE);
	ledBlue::toggle();
}

extern "C" void
TIM7_IRQHandler(void){
	Timer7::resetInterruptFlags(Timer7::FLAG_UPDATE);
	ledOrange::toggle();
}
| {
"content_hash": "9887163386da523313d644ac9378b482",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 91,
"avg_line_length": 20.09285714285714,
"alnum_prop": 0.6935655883398507,
"repo_name": "jrahlf/3D-Non-Contact-Laser-Profilometer",
"id": "55bc01bb2173c0e59262b1a63fa7b6cb6f2ce1ae",
"size": "2813",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "microcontroller/state.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "35578"
},
{
"name": "C",
"bytes": "10808820"
},
{
"name": "C++",
"bytes": "6982194"
},
{
"name": "CSS",
"bytes": "20227"
},
{
"name": "Gnuplot",
"bytes": "823"
},
{
"name": "Java",
"bytes": "89102"
},
{
"name": "Objective-C",
"bytes": "12577"
},
{
"name": "Objective-C++",
"bytes": "2376"
},
{
"name": "Python",
"bytes": "420663"
},
{
"name": "Shell",
"bytes": "639"
}
],
"symlink_target": ""
} |
body {
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
}
header {
padding-top: 65px;
}
h1 {
font-size: 100px;
font-weight: 500;
display: inline-block;
padding-bottom: 17px;
border-bottom: 1px solid #bbb;
}
.bask-amp {
font-family: Baskerville, serif;
font-style: italic;
font-size: 50px;
margin: 0 -5px;
}
.center {
text-align: center;
}
.subtitle {
position: relative;
bottom: 32px;
font-size: 16px;
background-color: white;
padding: 0 10px;
font-weight: bold;
display: inline;
}
.announcement {
font-family: 'Alegreya Sans', serif;
font-size: 14px;
margin-top: 300px;
letter-spacing: 1.2px;
font-weight: 700;
} | {
"content_hash": "af6d8663f0d4343441843cc47a27f70a",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 38,
"avg_line_length": 15.4,
"alnum_prop": 0.6666666666666666,
"repo_name": "c-johnson/bnm",
"id": "325a69af2d5e2ab9b2268a3207f66270f5efa058",
"size": "693",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/css/home.css",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "693"
},
{
"name": "Go",
"bytes": "6429"
}
],
"symlink_target": ""
} |
import json
import logging
import requests
from docker_registry.lib import config
import docker_registry.lib.signals
from docker_registry import storage
from docker_registry import tags
logger = logging.getLogger(__name__)
logger.info("Loading OpenShift tag_created extension")
# Module-level handles and extension configuration, resolved at import time.
cfg = config.load()
store = storage.load()

# Defaults: the extension stays disabled until both openshift_url and
# registry_url are supplied under extensions.openshift (see bottom of file).
openshift_url = None
openshift_insecure = False
openshift_ca_bundle = None
openshift_client_cert = None
openshift_client_key = None
registry_url = None

if cfg.extensions is not None and cfg.extensions.openshift is not None:
    # Narrow cfg to the extension's own config section.
    cfg = cfg.extensions.openshift
    openshift_url = cfg.openshift_url
    logger.info("OpenShift URL: {0}".format(openshift_url))
    openshift_insecure = cfg.openshift_insecure
    logger.info("OpenShift insecure: {0}".format(openshift_insecure))
    openshift_ca_bundle = cfg.openshift_ca_bundle
    logger.info("OpenShift CA bundle: {0}".format(openshift_ca_bundle))
    openshift_client_cert = cfg.openshift_client_cert
    logger.info("OpenShift client certificate: {0}".format(openshift_client_cert))
    openshift_client_key = cfg.openshift_client_key
    logger.info("OpenShift client key: {0}".format(openshift_client_key))
    if cfg.registry_url is not None:
        registry_url = cfg.registry_url
        logger.info("Registry URL: {0}".format(registry_url))
def tag_created(sender, namespace, repository, tag, value):
    """Signal handler: mirror a newly created registry tag into OpenShift.

    When the tag name differs from the image id (``value``), also store the
    tag under the image id so it can be resolved either way, then POST an
    ImageRepositoryMapping for the image. Failures are logged, never raised,
    so registry writes are not blocked by OpenShift being unreachable.
    """
    logger.debug("[openshift] namespace={0}; repository={1} tag={2} value={3}".
                 format(namespace, repository, tag, value))
    try:
        if tag != value:
            store.put_content(
                store.tag_path(namespace, repository, value), value)
            data = tags.create_tag_json(user_agent='')
            json_path = store.repository_tag_json_path(namespace, repository, value)
            store.put_content(json_path, data)
        # NOTE(review): indentation reconstructed — the mapping is posted for
        # every tag event, not only when tag != value; confirm against VCS.
        data = store.get_content(store.image_json_path(value))
        image = json.loads(data)
        _post_repository_binding(namespace, repository, tag, value, image)
    except Exception:
        # Best-effort integration: log and swallow so the registry keeps working.
        logger.exception("unable to create openshift ImageRepositoryMapping")
def _post_repository_binding(namespace, repository, tag, image_id, image):
    """POST a v1beta1 ImageRepositoryMapping to the OpenShift master.

    Binds ``tag`` in ``namespace/repository`` to ``image_id``, carrying the
    image's docker metadata so OpenShift can index it without pulling.

    :param namespace: registry namespace the repository lives in
    :param repository: repository name within the namespace
    :param tag: tag name being bound
    :param image_id: registry image id the tag points at
    :param image: parsed image JSON as stored by the registry
    :return: True if the master accepted the mapping, False otherwise
    """
    url = '{0}/imageRepositoryMappings'.format(openshift_url)
    params = {"sync": "true", "namespace": namespace}
    headers = {}

    # Docker pull spec for this repository, e.g. "registry:5000/ns/repo";
    # strip('/') copes with an unset registry_url prefix.
    name = "{0}/{1}/{2}".format(registry_url, namespace, repository).strip('/')
    ref = "{0}:{1}".format(name, image_id)
    body = {
        "kind": "ImageRepositoryMapping",
        "apiVersion": "v1beta1",
        "metadata": {
            "name": repository,
            "namespace": namespace,
        },
        "dockerImageRepository": name,
        "image": {
            "metadata": {
                "name": image_id,
            },
            "dockerImageReference": ref,
            "dockerImageMetadata": {
                "Id": image['id'],
                "Parent": image.get('parent', ''),
                "Comment": image.get('comment', ''),
                "Created": image.get('created', ''),
                "Container": image.get('container', ''),
                "ContainerConfig": image.get('container_config', ''),
                "DockerVersion": image.get('docker_version', ''),
                "Author": image.get('author', ''),
                "Config": image.get('config', ''),
                "Architecture": image.get('architecture', ''),
                "Size": image.get('Size', ''),
            }
        },
        "tag": tag
    }
    logger.debug("saving\n" + json.dumps(body))

    post_args = {
        'params': params,
        'headers': headers,
        'data': json.dumps(body),
    }

    # Server TLS verification: an explicit CA bundle wins, then the insecure
    # flag disables verification, otherwise use default verification.
    if openshift_ca_bundle is not None:
        post_args["verify"] = openshift_ca_bundle
    elif openshift_insecure:
        post_args["verify"] = False
    else:
        post_args["verify"] = True

    # Client certificate auth: (cert, key) pair, or a single combined file.
    if openshift_client_cert is not None and openshift_client_key is not None:
        post_args["cert"] = (openshift_client_cert, openshift_client_key)
    elif openshift_client_cert is not None:
        post_args["cert"] = openshift_client_cert

    resp = requests.post(url, **post_args)
    if resp.status_code == 422:
        logger.debug('openshift#_post_repository_binding: invalid request: %s' % resp.text)
        return False
    if resp.status_code != 200:
        logger.debug('openshift#_post_repository_binding: update returns status {0}\n{1}'.  # nopep8
                     format(resp.status_code, resp.text))
        return False
    return True
# Connect the signal only when the extension is fully configured; otherwise
# leave it unconnected so tag creation behaves as if the extension is absent.
if openshift_url is not None and registry_url is not None:
    docker_registry.lib.signals.tag_created.connect(tag_created)
    logger.info("OpenShift tag_created extension enabled")
else:
    logger.info("OpenShift tag_created extension disabled - missing openshift_url and/or registry_url")
| {
"content_hash": "6841884954f0671c3f96125e771ec099",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 103,
"avg_line_length": 34.5886524822695,
"alnum_prop": 0.6239491490670495,
"repo_name": "openshift/docker-registry-extensions",
"id": "3e9e47f824047132eda9180b4b2635ca15910204",
"size": "4877",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openshift/tag_created.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5719"
},
{
"name": "Shell",
"bytes": "876"
}
],
"symlink_target": ""
} |
// Render a settlement plan as readable text: one "X owes:" section per
// payer, followed by one "- amount to payee" line per payment.

function formatParticipant(participant) {
  // Unknown participant ids render as a placeholder instead of crashing.
  return participant != null ? participant.name : '[Unknown]';
}

function formatAmount(amount) {
  return amount.toFixed(2);
}

function formatPayment(payment, payee) {
  return `- ${formatAmount(payment.amount)} to ${formatParticipant(payee)}`;
}

function formatPayerPayments(payerId, payments, participants) {
  const header = `${formatParticipant(participants[payerId])} owes:`;
  const lines = payments.map(payment =>
    formatPayment(payment, participants[payment.payeeId]));
  return [header, ...lines].join('\n');
}

function toString(paymentsByPayerId, participants) {
  const sections = Object.entries(paymentsByPayerId).map(
    ([payerId, payments]) => formatPayerPayments(payerId, payments, participants));
  return sections.join('\n');
}

export default toString;
| {
"content_hash": "0655b829d79fbc5710fef8c9efb738e5",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 86,
"avg_line_length": 33.84,
"alnum_prop": 0.6725768321513003,
"repo_name": "manisero/FairShare",
"id": "7fa17cfe3bf5a79174a5d3141cb65476c7373447",
"size": "846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/5_logic/settlement/toString.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "145"
},
{
"name": "JavaScript",
"bytes": "101848"
}
],
"symlink_target": ""
} |
package com.bzh.gl.lesson7;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.DisplayMetrics;
import android.view.View;
import android.widget.Button;
import com.bzh.gl.R;
/**
 * Hosts the lesson-seven GL surface and wires the on-screen buttons to
 * renderer actions (cube count, VBO usage, vertex stride usage).
 *
 * Renderer calls are posted onto the GL thread via queueEvent(); UI label
 * updates come back via runOnUiThread().
 */
public class LessonSevenActivity extends AppCompatActivity {

    private LessonSevenGLSurfaceView mGlSurfaceView;
    // Renderer abstraction; a Java or native (JNI) implementation is chosen in onCreate.
    private Action mRender;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.lesson_sevent);

        mGlSurfaceView = (LessonSevenGLSurfaceView) findViewById(R.id.gl_surface_view);

        // Check if the system support OpenGL ES 2.0
        final ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
        final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
        final boolean supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;

        if (supportsEs2) {
            // Request an OpenGL ES 2.0 compatible context
            mGlSurfaceView.setEGLContextClientVersion(2);

            final DisplayMetrics displayMetrics = new DisplayMetrics();
            getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);

            // Set the renderer to our demo renderer; the Java implementation
            // (LessonSevenRenderer) is kept here as the alternative.
            // mRender = new LessonSevenRenderer(this, mGlSurfaceView);
            mRender = new NativeSevenRenderer(this, mGlSurfaceView);
            mRender.init();
            mGlSurfaceView.setRenderer((GLSurfaceView.Renderer) mRender, displayMetrics.density);
        } else {
            // No ES 2.0 support: leave the activity without a renderer or listeners.
            return;
        }

        findViewById(R.id.button_decrease_num_cubes).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                decreaseCubeCount();
            }
        });

        findViewById(R.id.button_increase_num_cubes).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                increaseCubeCount();
            }
        });

        findViewById(R.id.button_switch_VBOs).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                toggleVBOs();
            }
        });

        findViewById(R.id.button_switch_stride).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                toggleStride();
            }
        });
    }

    @Override
    protected void onResume() {
        // The activity must call the GL surface view's onResume() on activity
        // onResume().
        super.onResume();
        mGlSurfaceView.onResume();
    }

    @Override
    protected void onPause() {
        // The activity must call the GL surface view's onPause() on activity
        // onPause().
        super.onPause();
        mGlSurfaceView.onPause();
    }

    // Each of the following forwards a button action to the renderer on the
    // GL thread, since renderer state must not be touched from the UI thread.

    private void decreaseCubeCount() {
        mGlSurfaceView.queueEvent(new Runnable() {
            @Override
            public void run() {
                mRender.decreaseCubeCount();
            }
        });
    }

    private void increaseCubeCount() {
        mGlSurfaceView.queueEvent(new Runnable() {
            @Override
            public void run() {
                mRender.increaseCubeCount();
            }
        });
    }

    private void toggleVBOs() {
        mGlSurfaceView.queueEvent(new Runnable() {
            @Override
            public void run() {
                mRender.toggleVBOs();
            }
        });
    }

    protected void toggleStride() {
        mGlSurfaceView.queueEvent(new Runnable() {
            @Override
            public void run() {
                mRender.toggleStride();
            }
        });
    }

    // Called back by the renderer to reflect the current VBO mode in the UI.
    public void updateVboStatus(final boolean usingVbos) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (usingVbos) {
                    ((Button) findViewById(R.id.button_switch_VBOs)).setText(R.string.lesson_seven_using_VBOs);
                } else {
                    ((Button) findViewById(R.id.button_switch_VBOs)).setText(R.string.lesson_seven_not_using_VBOs);
                }
            }
        });
    }

    // Called back by the renderer to reflect the current stride mode in the UI.
    public void updateStrideStatus(final boolean useStride) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (useStride) {
                    ((Button) findViewById(R.id.button_switch_stride)).setText(R.string.lesson_seven_using_stride);
                } else {
                    ((Button) findViewById(R.id.button_switch_stride)).setText(R.string.lesson_seven_not_using_stride);
                }
            }
        });
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release renderer resources (native buffers for the JNI renderer).
        mRender.destroy();
    }
}
| {
"content_hash": "14b73e216662efa480cf3f43105b3ed8",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 119,
"avg_line_length": 31.660493827160494,
"alnum_prop": 0.5934880093585494,
"repo_name": "biezhihua/Android_OpenGL_Demo",
"id": "b43b41981259cf70ad9e86eeca0827630005b5a0",
"size": "5129",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OpenGLLesson/app/src/main/java/com/bzh/gl/lesson7/LessonSevenActivity.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "850"
},
{
"name": "C++",
"bytes": "383697"
},
{
"name": "CMake",
"bytes": "5774"
},
{
"name": "GLSL",
"bytes": "48639"
},
{
"name": "Java",
"bytes": "548169"
},
{
"name": "JavaScript",
"bytes": "30282"
},
{
"name": "Makefile",
"bytes": "3461"
},
{
"name": "Python",
"bytes": "3185"
}
],
"symlink_target": ""
} |
import * as CopyWebpackPlugin from 'copy-webpack-plugin';
import * as path from 'path';
import {Configuration, DefinePlugin, LoaderOptionsPlugin, optimize} from 'webpack';
// Production webpack configuration: bundles the TypeScript entry point,
// copies the static index.html into dist/ and minifies the output.
const config: Configuration = {
    entry: './src/index.tsx',
    output: {
        filename: 'bundle.js',
        path: path.join(__dirname, 'dist')
    },
    resolve: {
        extensions: ['.webpack.js', '.web.js', '.ts', '.tsx', '.js']
    },
    module: {
        rules: [
            {
                test: /\.tsx?$/,
                // Use a RegExp: a string condition must be an absolute path,
                // so the former '/node_modules/' string never matched and
                // node_modules was not actually excluded.
                exclude: /node_modules/,
                use: [{
                    loader: 'babel-loader'
                }, {
                    loader: 'awesome-typescript-loader',
                    options: {
                        configFileName: 'tsconfig.prod.json'
                    }
                }]
            }, {
                test: /\.scss$/,
                exclude: /node_modules/,
                use: [{
                    loader: 'style-loader'
                }, {
                    loader: 'sass-loader',
                    // 'query' is a deprecated alias of 'options' in webpack 2+.
                    options: {
                        sourceMap: false
                    }
                }]
            }
        ]
    },
    plugins: [
        new CopyWebpackPlugin([{
            from: 'src/index.html'
        }]),
        new LoaderOptionsPlugin({
            minimize: true,
            debug: false
        }),
        new DefinePlugin({
            'process.env': {
                NODE_ENV: JSON.stringify('production'),
            }
        }),
        new optimize.UglifyJsPlugin(),
        new optimize.ModuleConcatenationPlugin()
    ]
};

export = config;
| {
"content_hash": "e0a8b5706e69aa263f9be6e963aaf4c2",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 83,
"avg_line_length": 26.403225806451612,
"alnum_prop": 0.4129505192425168,
"repo_name": "openscript/foolproof-react-starterkit",
"id": "e8958edec18a80af7d6448b1f5b5eea60cd4ab9b",
"size": "1637",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webpack.prod.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "306"
},
{
"name": "TypeScript",
"bytes": "3562"
}
],
"symlink_target": ""
} |
```txt
<application android:allowTaskReparenting=["true" | "false"]
android:allowBackup=["true" | "false"]
android:backupAgent="string"
android:backupInForeground=["true" | "false"]
android:banner="drawable resource"
android:debuggable=["true" | "false"]
android:description="string resource"
android:enabled=["true" | "false"]
android:extractNativeLibs=["true" | "false"]
android:fullBackupContent="string"
android:fullBackupOnly=["true" | "false"]
android:hasCode=["true" | "false"]
android:hardwareAccelerated=["true" | "false"]
android:icon="drawable resource"
android:isGame=["true" | "false"]
android:killAfterRestore=["true" | "false"]
android:largeHeap=["true" | "false"]
android:label="string resource"
android:logo="drawable resource"
android:manageSpaceActivity="string"
android:name="string"
android:permission="string"
android:persistent=["true" | "false"]
android:process="string"
android:restoreAnyVersion=["true" | "false"]
android:requiredAccountType="string"
android:resizeableActivity=["true" | "false"]
android:restrictedAccountType="string"
android:supportsRtl=["true" | "false"]
android:taskAffinity="string"
android:testOnly=["true" | "false"]
android:theme="resource or theme"
android:uiOptions=["none" | "splitActionBarWhenNarrow"]
android:usesCleartextTraffic=["true" | "false"]
android:vmSafeMode=["true" | "false"] >
. . .
</application>
```
## 父节点:
manifest
## 可以包含的子节点:
activity, activity-alias, meta-data, service, receiver, provider, uses-library
| {
"content_hash": "af6d1cdc3b93a59ecc591ee686211674",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 73,
"avg_line_length": 40.93617021276596,
"alnum_prop": 0.5758835758835759,
"repo_name": "cdcdec/LearningNotes",
"id": "ef036f5dd9ed8d57a8cd37cbfa05a818bc440814",
"size": "1973",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Android/配置文件/application.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19896"
},
{
"name": "HTML",
"bytes": "2826"
},
{
"name": "Java",
"bytes": "231"
}
],
"symlink_target": ""
} |
package com.zaxxer.hikari;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.zaxxer.hikari.pool.Mediator;
import com.zaxxer.hikari.pool.PoolEntry;
import com.zaxxer.hikari.util.ConcurrentBag;
import com.zaxxer.hikari.util.ConcurrentBag.IBagStateListener;
/**
*
* @author Brett Wooldridge
*/
/**
 * Exercises {@link ConcurrentBag} state transitions — add, borrow, reserve,
 * requite, unreserve, remove and close — including the warning messages the
 * bag logs for invalid operations (captured via a redirected log stream).
 *
 * @author Brett Wooldridge
 */
public class TestConcurrentBag
{
   // Shared data source / mediator used to mint PoolEntry instances for the bag.
   private static HikariDataSource ds;
   private static Mediator mediator;

   @BeforeClass
   public static void setup()
   {
      // Minimal pool configuration backed by the stub data source; fail fast
      // so a misconfiguration surfaces here rather than inside a test.
      HikariConfig config = new HikariConfig();
      config.setMinimumIdle(1);
      config.setMaximumPoolSize(2);
      config.setInitializationFailFast(true);
      config.setConnectionTestQuery("VALUES 1");
      config.setDataSourceClassName("com.zaxxer.hikari.mocks.StubDataSource");

      ds = new HikariDataSource(config);
      mediator = new Mediator(TestElf.getPool(ds));
   }

   @AfterClass
   public static void teardown()
   {
      ds.close();
   }

   @Test
   public void testConcurrentBag() throws Exception
   {
      // Listener whose addBagItem() hands back an already-completed Future,
      // so the bag never blocks waiting for a new item during this test.
      ConcurrentBag<PoolEntry> bag = new ConcurrentBag<PoolEntry>(new IBagStateListener() {
         @Override
         public Future<Boolean> addBagItem()
         {
            return new Future<Boolean>() {
               @Override
               public boolean isDone()
               {
                  return true;
               }

               @Override
               public boolean isCancelled()
               {
                  return false;
               }

               @Override
               public Boolean get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException
               {
                  return null;
               }

               @Override
               public Boolean get() throws InterruptedException, ExecutionException
               {
                  return true;
               }

               @Override
               public boolean cancel(boolean mayInterruptIfRunning)
               {
                  return false;
               }
            };
         }
      });
      // Empty bag: no values in any state.
      Assert.assertEquals(0, bag.values(8).size());

      // Put one entry into each of the three interesting states.
      PoolEntry reserved = mediator.newPoolEntry();
      bag.add(reserved);
      bag.reserve(reserved);      // reserved

      PoolEntry inuse = mediator.newPoolEntry();
      bag.add(inuse);
      bag.borrow(2, TimeUnit.MILLISECONDS); // in use

      PoolEntry notinuse = mediator.newPoolEntry();
      bag.add(notinuse); // not in use

      bag.dumpState();

      // Capture the bag's log output so the invalid-operation warnings
      // below can be asserted on.
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      PrintStream ps = new PrintStream(baos, true);
      TestElf.setSlf4jTargetStream(ConcurrentBag.class, ps);

      // requite() of a reserved (never borrowed) entry logs a warning.
      bag.requite(reserved);
      Assert.assertTrue(new String(baos.toByteArray()).contains("does not exist"));

      // remove() of an entry that is neither borrowed nor reserved warns.
      bag.remove(notinuse);
      Assert.assertTrue(new String(baos.toByteArray()).contains("not borrowed or reserved"));

      // unreserve() of an entry that was never reserved warns.
      bag.unreserve(notinuse);
      Assert.assertTrue(new String(baos.toByteArray()).contains("was not reserved"));

      // Second remove() of the same entry warns (first one succeeds).
      bag.remove(inuse);
      bag.remove(inuse);
      Assert.assertTrue(new String(baos.toByteArray()).contains("not borrowed or reserved"));

      // After close(), add() is ignored, so the borrow below either returns
      // a different entry or times out with IllegalStateException.
      bag.close();
      try {
         PoolEntry bagEntry = mediator.newPoolEntry();
         bag.add(bagEntry);
         Assert.assertNotEquals(bagEntry, bag.borrow(100, TimeUnit.MILLISECONDS));
      }
      catch (IllegalStateException e) {
         Assert.assertTrue(new String(baos.toByteArray()).contains("ignoring add()"));
      }

      Assert.assertNotNull(notinuse.toString());
   }
}
| {
"content_hash": "f7a6931ea13cb3ad744d27bee6d0c0e4",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 128,
"avg_line_length": 29.14074074074074,
"alnum_prop": 0.6148957803762074,
"repo_name": "785468931/HikariCP",
"id": "3b1fce5ce7501d1686565e7e1f433c566278198c",
"size": "4533",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "src/test/java/com/zaxxer/hikari/TestConcurrentBag.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "412338"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "d67f15b0189a7d042b7697870c84cb75",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "d6cccdc9947fb99f0fad556d9a0aa490b24efced",
"size": "178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Piperales/Piperaceae/Piper/Piper subseptemnervium/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
title: "Other APIs"
description: "Sensu offers API endpoints for basic authentication, checking cluster health checks, and retrieving metrics, license information, and version information."
product: "Sensu Go"
version: "6.8"
weight: 20
layout: "single"
toc: false
menu:
sensu-go-6.8:
parent: api
identifier: other
---
In addition to the [core/v2 API][1] and [enterprise APIs][2], Sensu offers endpoints for basic authentication, health, license, metrics, and version:
{{< otherapiListing >}}
[1]: ../core/
[2]: ../enterprise/
| {
"content_hash": "6730bdc85ecff21fd4660e1a7336a839",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 169,
"avg_line_length": 27,
"alnum_prop": 0.7222222222222222,
"repo_name": "sensu/sensu-docs",
"id": "0558bbde4bbf19e7037820ef7ce95209d8f58852",
"size": "544",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "content/sensu-go/6.8/api/other/_index.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "123020"
},
{
"name": "JavaScript",
"bytes": "61971"
},
{
"name": "Procfile",
"bytes": "14"
},
{
"name": "Python",
"bytes": "3764"
},
{
"name": "Ruby",
"bytes": "4422"
},
{
"name": "SCSS",
"bytes": "32403"
},
{
"name": "Shell",
"bytes": "30924"
}
],
"symlink_target": ""
} |
# Chef resource for managing AIX kernel tunables.
# :update applies the given tunables; :reset / :reset_all restore them
# to defaults (single set vs. all — confirm exact semantics in the provider).
actions :update, :reset, :reset_all
default_action :update

# Set by the provider during load_current_resource to record whether the
# tunable state already exists — TODO confirm against the provider.
attr_accessor :exists

# Resource name (name attribute).
attribute :name, name_attribute: true, kind_of: String
# Which AIX tuning command family the tunables belong to; restricted to
# the four values accepted by equal_to.
attribute :mode, kind_of: Symbol, equal_to: [:ioo, :vmo, :schedo, :no], required: true
# Hash of tunable-name => value pairs to apply.
attribute :tunables, kind_of: Hash
# Whether the change should be made permanent.
attribute :permanent, kind_of: [TrueClass, FalseClass], default: false
# Whether the change should take effect at next boot.
attribute :nextboot, kind_of: [TrueClass, FalseClass], default: false
| {
"content_hash": "2c7bdb9ad45488889b072e4504277d7a",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 86,
"avg_line_length": 44.44444444444444,
"alnum_prop": 0.7475,
"repo_name": "adejoux/aix",
"id": "296f898617d3085ccf9c766ce60f16850a05e580",
"size": "1011",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resources/tunables.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ruby",
"bytes": "325174"
},
{
"name": "Shell",
"bytes": "25738"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE hibernate-mapping PUBLIC
        "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
        "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<!-- Package that the mapped entity classes belong to. -->
<hibernate-mapping package="com.ap.entity">
    <!-- Maps the Account entity class to the "account" table
         (Hibernate creates the table if it does not exist). -->
    <class name="Account" table="account">
        <!-- The id primary key and the remaining properties map to the
             corresponding table columns; all are defined on the
             Account entity class. -->
        <id name="id" column="id"/>
        <property name="balance" column="balance"></property>
        <property name="creditCard" column="creditCard"></property>
    </class>
</hibernate-mapping>
"content_hash": "62272fe7f8131421f031d82ee31ac145",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 69,
"avg_line_length": 34.833333333333336,
"alnum_prop": 0.6220095693779905,
"repo_name": "FelixCfs/Agricultural-products-trading-platform-based-on-SSH",
"id": "12b3e298aa60da4e1e000a4d0477f5058bcca28f",
"size": "745",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/com/ap/entity/Account.hbm.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "32418"
},
{
"name": "Java",
"bytes": "131840"
},
{
"name": "JavaScript",
"bytes": "96453"
}
],
"symlink_target": ""
} |