Dataset schema (29 columns):

| Column | Type | Lengths / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 1.05M |
| ext | string | 98 distinct values |
| lang | string | 21 distinct values |
| max_stars_repo_path | string | length 3-945 |
| max_stars_repo_name | string | length 4-118 |
| max_stars_repo_head_hexsha | string | length 40-78 |
| max_stars_repo_licenses | sequence | length 1-10 |
| max_stars_count | int64 | 1 to 368k; nullable (⌀) |
| max_stars_repo_stars_event_min_datetime | string | length 24; nullable (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24; nullable (⌀) |
| max_issues_repo_path | string | length 3-945 |
| max_issues_repo_name | string | length 4-118 |
| max_issues_repo_head_hexsha | string | length 40-78 |
| max_issues_repo_licenses | sequence | length 1-10 |
| max_issues_count | int64 | 1 to 134k; nullable (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24; nullable (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24; nullable (⌀) |
| max_forks_repo_path | string | length 3-945 |
| max_forks_repo_name | string | length 4-135 |
| max_forks_repo_head_hexsha | string | length 40-78 |
| max_forks_repo_licenses | sequence | length 1-10 |
| max_forks_count | int64 | 1 to 105k; nullable (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24; nullable (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24; nullable (⌀) |
| content | string | length 5 to 1.05M |
| avg_line_length | float64 | 1 to 1.03M |
| max_line_length | int64 | 2 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
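The rows below are sample records in this schema, with the `content` column holding the full source file for each record. As a minimal, hypothetical sketch of how records with these columns could be iterated with the Hugging Face `datasets` library: the dataset identifier `your-org/your-code-dataset` and the `train` split are placeholder assumptions (the actual dataset name is not stated on this page); only the column names are taken from the schema table above.

```python
# Hypothetical sketch only: the dataset id and split below are placeholders,
# not taken from this page; the column names match the schema table above.
from itertools import islice
from datasets import load_dataset

# Stream the dataset so large "content" values are not all loaded into memory.
ds = load_dataset("your-org/your-code-dataset", split="train", streaming=True)

for row in islice(ds, 3):
    # Repo-level metadata for the record.
    print(row["lang"], row["ext"], row["size"], row["max_stars_repo_path"])
    print(row["max_stars_repo_licenses"], row["max_stars_count"])
    # The source file itself; print only the first 200 characters.
    print(row["content"][:200])
```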
2d73fd4bec9d891095b8e6a69a2326e9f820cb42 | 12,013 | cpp | C++ | platform/default/src/mbgl/map/map_snapshotter.cpp | roblabs/maplibre-gl-native | d62ff400c6f75750d71b563344b1ca1e07b9b576 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 4,234 | 2015-01-09T08:10:16.000Z | 2022-03-30T14:13:55.000Z | platform/default/src/mbgl/map/map_snapshotter.cpp | roblabs/maplibre-gl-native | d62ff400c6f75750d71b563344b1ca1e07b9b576 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 12,771 | 2015-01-01T20:27:42.000Z | 2022-03-24T18:14:44.000Z | platform/default/src/mbgl/map/map_snapshotter.cpp | roblabs/maplibre-gl-native | d62ff400c6f75750d71b563344b1ca1e07b9b576 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 1,571 | 2015-01-08T08:24:53.000Z | 2022-03-28T06:30:53.000Z | #include <mbgl/map/map_snapshotter.hpp>
#include <mbgl/actor/actor_ref.hpp>
#include <mbgl/gfx/headless_frontend.hpp>
#include <mbgl/map/map.hpp>
#include <mbgl/map/map_options.hpp>
#include <mbgl/map/transform.hpp>
#include <mbgl/map/transform_state.hpp>
#include <mbgl/renderer/renderer_observer.hpp>
#include <mbgl/renderer/update_parameters.hpp>
#include <mbgl/storage/resource_options.hpp>
#include <mbgl/style/style.hpp>
#include <mbgl/util/event.hpp>
#include <mbgl/util/exception.hpp>
#include <mbgl/util/logging.hpp>
#include <mbgl/util/thread.hpp>
#include <utility>
namespace mbgl {
// static
MapSnapshotterObserver& MapSnapshotterObserver::nullObserver() {
static MapSnapshotterObserver mapSnapshotterObserver;
return mapSnapshotterObserver;
}
class ForwardingRendererObserver final : public RendererObserver {
public:
explicit ForwardingRendererObserver(RendererObserver& delegate_)
: mailbox(std::make_shared<Mailbox>(*Scheduler::GetCurrent())), delegate(delegate_, mailbox) {}
~ForwardingRendererObserver() override { mailbox->close(); }
void onInvalidate() override { delegate.invoke(&RendererObserver::onInvalidate); }
void onResourceError(std::exception_ptr err) override { delegate.invoke(&RendererObserver::onResourceError, err); }
void onDidFinishRenderingFrame(RenderMode mode, bool repaintNeeded, bool placementChanged) override {
delegate.invoke(&RendererObserver::onDidFinishRenderingFrame, mode, repaintNeeded, placementChanged);
}
void onStyleImageMissing(const std::string& image, const StyleImageMissingCallback& cb) override {
delegate.invoke(&RendererObserver::onStyleImageMissing, image, cb);
}
private:
std::shared_ptr<Mailbox> mailbox;
ActorRef<RendererObserver> delegate;
};
class SnapshotterRenderer final : public RendererObserver {
public:
SnapshotterRenderer(Size size, float pixelRatio, const optional<std::string>& localFontFamily)
: frontend(size,
pixelRatio,
gfx::HeadlessBackend::SwapBehaviour::NoFlush,
gfx::ContextMode::Unique,
localFontFamily) {}
void reset() {
hasPendingStillImageRequest = false;
frontend.reset();
}
void onInvalidate() override { rendererObserver->onInvalidate(); }
void onResourceError(std::exception_ptr err) override {
hasPendingStillImageRequest = false;
rendererObserver->onResourceError(err);
}
void onDidFinishRenderingFrame(RenderMode mode, bool repaintNeeded, bool placementChanged) override {
if (mode == RenderMode::Full && hasPendingStillImageRequest) {
stillImage = frontend.readStillImage();
}
rendererObserver->onDidFinishRenderingFrame(mode, repaintNeeded, placementChanged);
}
void onStyleImageMissing(const std::string& id, const StyleImageMissingCallback& done) override {
rendererObserver->onStyleImageMissing(id, done);
}
void setObserver(std::shared_ptr<RendererObserver> observer) {
assert(observer);
rendererObserver = std::move(observer);
frontend.setObserver(*this);
}
void update(std::shared_ptr<UpdateParameters> params) {
assert(params);
hasPendingStillImageRequest = params->stillImageRequest;
frontend.update(std::move(params));
}
void setSize(Size size) { frontend.setSize(size); }
PremultipliedImage takeImage() {
assert(stillImage.valid());
return std::move(stillImage);
}
private:
PremultipliedImage stillImage;
bool hasPendingStillImageRequest = false;
std::shared_ptr<RendererObserver> rendererObserver;
HeadlessFrontend frontend;
};
class SnapshotterRendererFrontend final : public RendererFrontend {
public:
SnapshotterRendererFrontend(Size size, float pixelRatio, optional<std::string> localFontFamily)
: renderer(std::make_unique<util::Thread<SnapshotterRenderer>>(
"Snapshotter", size, pixelRatio, std::move(localFontFamily))) {}
~SnapshotterRendererFrontend() override = default;
void reset() override { renderer->actor().invoke(&SnapshotterRenderer::reset); }
void setObserver(RendererObserver& observer) override {
renderer->actor().invoke(&SnapshotterRenderer::setObserver,
std::make_unique<ForwardingRendererObserver>(observer));
}
void update(std::shared_ptr<UpdateParameters> parameters) override {
updateParameters = std::move(parameters);
renderer->actor().invoke(&SnapshotterRenderer::update, updateParameters);
}
void setSize(Size size) { renderer->actor().invoke(&SnapshotterRenderer::setSize, size); }
const TransformState& getTransformState() const {
assert(updateParameters);
static TransformState defaultTransformState{};
if (updateParameters) return updateParameters->transformState;
return defaultTransformState;
}
PremultipliedImage takeImage() { return renderer->actor().ask(&SnapshotterRenderer::takeImage).get(); }
private:
std::shared_ptr<UpdateParameters> updateParameters;
const std::unique_ptr<util::Thread<SnapshotterRenderer>> renderer;
};
class MapSnapshotter::Impl final : public MapObserver {
public:
Impl(Size size,
float pixelRatio,
const ResourceOptions& resourceOptions,
MapSnapshotterObserver& observer_,
optional<std::string> localFontFamily)
: observer(observer_),
frontend(size, pixelRatio, std::move(localFontFamily)),
map(frontend,
*this,
MapOptions().withMapMode(MapMode::Static).withSize(size).withPixelRatio(pixelRatio),
resourceOptions) {}
void setRegion(const LatLngBounds& region) {
mbgl::EdgeInsets insets{0, 0, 0, 0};
std::vector<LatLng> latLngs = {region.southwest(), region.northeast()};
map.jumpTo(map.cameraForLatLngs(latLngs, insets));
}
void snapshot(MapSnapshotter::Callback callback) {
if (!callback) {
Log::Error(Event::General, "MapSnapshotter::Callback is not set");
return;
}
if (renderStillCallback) {
callback(std::make_exception_ptr(util::MisuseException("MapSnapshotter is currently rendering an image")),
PremultipliedImage(),
{},
{},
{});
}
renderStillCallback = std::make_unique<Actor<MapSnapshotter::Callback>>(
*Scheduler::GetCurrent(),
[this, cb = std::move(callback)](std::exception_ptr ptr,
PremultipliedImage image,
Attributions attributions,
PointForFn pfn,
LatLngForFn latLonFn) {
cb(std::move(ptr), std::move(image), std::move(attributions), std::move(pfn), std::move(latLonFn));
renderStillCallback.reset();
});
map.renderStill([this, actorRef = renderStillCallback->self()](const std::exception_ptr& error) {
// Create lambda that captures the current transform state
// and can be used to translate for geographic to screen
// coordinates
assert(map.getCameraOptions().center);
PointForFn pointForFn = [center = *map.getCameraOptions().center,
transformState = frontend.getTransformState()](const LatLng& latLng) {
LatLng unwrappedLatLng = latLng.wrapped();
unwrappedLatLng.unwrapForShortestPath(center);
Transform transform{transformState};
return transform.latLngToScreenCoordinate(unwrappedLatLng);
};
// Create lambda that captures the current transform state
// and can be used to translate for geographic to screen
// coordinates
LatLngForFn latLngForFn = [transformState =
frontend.getTransformState()](const ScreenCoordinate& screenCoordinate) {
Transform transform{transformState};
return transform.screenCoordinateToLatLng(screenCoordinate);
};
// Collect all source attributions
std::vector<std::string> attributions;
for (auto source : map.getStyle().getSources()) {
auto attribution = source->getAttribution();
if (attribution) {
attributions.push_back(*attribution);
}
}
// Invoke callback
actorRef.invoke(&MapSnapshotter::Callback::operator(),
error,
error ? PremultipliedImage() : frontend.takeImage(),
std::move(attributions),
std::move(pointForFn),
std::move(latLngForFn));
});
}
// MapObserver overrides
void onDidFailLoadingMap(MapLoadError, const std::string& error) override { observer.onDidFailLoadingStyle(error); }
void onDidFinishLoadingStyle() override { observer.onDidFinishLoadingStyle(); }
void onStyleImageMissing(const std::string& image) override { observer.onStyleImageMissing(image); }
Map& getMap() { return map; }
const Map& getMap() const { return map; }
SnapshotterRendererFrontend& getRenderer() { return frontend; }
void cancel() { renderStillCallback.reset(); }
private:
std::unique_ptr<Actor<MapSnapshotter::Callback>> renderStillCallback;
MapSnapshotterObserver& observer;
SnapshotterRendererFrontend frontend;
Map map;
};
MapSnapshotter::MapSnapshotter(Size size,
float pixelRatio,
const ResourceOptions& resourceOptions,
MapSnapshotterObserver& observer,
optional<std::string> localFontFamily)
: impl(std::make_unique<MapSnapshotter::Impl>(
size, pixelRatio, resourceOptions, observer, std::move(localFontFamily))) {}
MapSnapshotter::MapSnapshotter(Size size, float pixelRatio, const ResourceOptions& resourceOptions)
: MapSnapshotter(size, pixelRatio, resourceOptions, MapSnapshotterObserver::nullObserver()) {}
MapSnapshotter::~MapSnapshotter() = default;
void MapSnapshotter::setStyleURL(const std::string& styleURL) {
impl->getMap().getStyle().loadURL(styleURL);
}
std::string MapSnapshotter::getStyleURL() const {
return impl->getMap().getStyle().getURL();
}
void MapSnapshotter::setStyleJSON(const std::string& styleJSON) {
impl->getMap().getStyle().loadJSON(styleJSON);
}
std::string MapSnapshotter::getStyleJSON() const {
return impl->getMap().getStyle().getJSON();
}
void MapSnapshotter::setSize(const Size& size) {
impl->getMap().setSize(size);
impl->getRenderer().setSize(size);
}
Size MapSnapshotter::getSize() const {
return impl->getMap().getMapOptions().size();
}
void MapSnapshotter::setCameraOptions(const CameraOptions& options) {
impl->getMap().jumpTo(options);
}
CameraOptions MapSnapshotter::getCameraOptions() const {
EdgeInsets insets;
return impl->getMap().getCameraOptions(insets);
}
void MapSnapshotter::setRegion(const LatLngBounds& region) {
impl->setRegion(region);
}
LatLngBounds MapSnapshotter::getRegion() const {
return impl->getMap().latLngBoundsForCamera(getCameraOptions());
}
style::Style& MapSnapshotter::getStyle() {
return impl->getMap().getStyle();
}
const style::Style& MapSnapshotter::getStyle() const {
return impl->getMap().getStyle();
}
void MapSnapshotter::snapshot(MapSnapshotter::Callback callback) {
impl->snapshot(std::move(callback));
}
void MapSnapshotter::cancel() {
impl->cancel();
}
} // namespace mbgl
| 37.540625 | 120 | 0.659619 |
3901b90912c92ecbec531d317c18d9b08a3fe90d | 2,106 | py | Python | molior/molior/utils.py | randombenj/molior | 5f22935a1860c9ab206acfa52ba6206ae1755594 | [
"Apache-2.0"
] | null | null | null | molior/molior/utils.py | randombenj/molior | 5f22935a1860c9ab206acfa52ba6206ae1755594 | [
"Apache-2.0"
] | null | null | null | molior/molior/utils.py | randombenj/molior | 5f22935a1860c9ab206acfa52ba6206ae1755594 | [
"Apache-2.0"
] | null | null | null | """
Provides utilities for molior core.
"""
import re
import os
import shlex
from launchy import Launchy
from molior.aptly import AptlyApi
from .configuration import Configuration
from .logger import get_logger
logger = get_logger()
def get_aptly_connection():
"""
Connects to aptly server and returns aptly
object.
Returns:
AptlyApi: The connected aptly api instance.
"""
cfg = Configuration()
api_url = cfg.aptly.get("api_url")
gpg_key = cfg.aptly.get("gpg_key")
aptly_user = cfg.aptly.get("user")
aptly_passwd = cfg.aptly.get("pass")
aptly = AptlyApi(api_url, gpg_key, username=aptly_user, password=aptly_passwd)
return aptly
def parse_repository_name(url):
"""
Returns the repository name
of a git clone url.
Args:
url (str): Git clone url to parse
Returns:
name (str): The name of the repository
Examples:
>>> url = 'ssh://git@foo.com:1337/~jon/foobar.git'
        >>> parse_repository_name(url)
>>> 'foobar'
or:
>>> url = 'ssh://git@foo.com:1337/~jon/foobar'
        >>> parse_repository_name(url)
>>> 'foobar'
"""
if url.endswith(".git"):
search = re.search(r"([0-9a-zA-Z_\-.]+).git$", url)
if search:
return search.group(1)
return os.path.basename(url)
async def get_changelog_attr(name, path):
"""
Gets given changelog attribute from given
repository path.
Args:
name (str): The attr's name.
path (pathlib.Path): The repo's path.
"""
attr = ""
err = ""
async def outh(line):
nonlocal attr
attr += line
async def errh(line):
nonlocal err
err += line
process = Launchy(shlex.split("dpkg-parsechangelog -S {}".format(name)), outh, errh, cwd=str(path))
await process.launch()
ret = await process.wait()
if ret != 0:
logger.error("error occured while getting changelog attribute: %s", str(err, "utf-8"))
raise Exception("error running dpkg-parsechangelog")
return attr.strip()
| 23.931818 | 103 | 0.617759 |
aeeaa34aa466259c0e7351cd90d30fee4b63f984 | 413 | swift | Swift | Sources/App/Models/FederationObject.swift | gaborvass/adatbank-backend | 6f2147ea7b32c4c794a352103264912c5749fd1d | [
"MIT"
] | null | null | null | Sources/App/Models/FederationObject.swift | gaborvass/adatbank-backend | 6f2147ea7b32c4c794a352103264912c5749fd1d | [
"MIT"
] | null | null | null | Sources/App/Models/FederationObject.swift | gaborvass/adatbank-backend | 6f2147ea7b32c4c794a352103264912c5749fd1d | [
"MIT"
] | null | null | null | //
// FederationObject.swift
// Adatbank
//
// Created by Vass Gábor on 29/01/16.
// Copyright © 2016 Gabor, Vass. All rights reserved.
//
import Foundation
struct FederationObject : Codable {
let federationId: String
let federationName: String
init(federationId : String, federationName : String) {
self.federationId = federationId
self.federationName = federationName
}
}
| 17.956522 | 58 | 0.694915 |
586a8fff20dd571d0eafd5430f11f59671831d2a | 947 | sql | SQL | tests/utils/db_drop.sql | fga-eps-mds/2021-2-PUMA-UserService | 6496cc6e4a94148e392dd54207290319f8ac61b8 | [
"MIT"
] | null | null | null | tests/utils/db_drop.sql | fga-eps-mds/2021-2-PUMA-UserService | 6496cc6e4a94148e392dd54207290319f8ac61b8 | [
"MIT"
] | 7 | 2022-02-27T16:34:21.000Z | 2022-03-29T22:37:22.000Z | tests/utils/db_drop.sql | fga-eps-mds/2021-2-PUMA-UserService | 6496cc6e4a94148e392dd54207290319f8ac61b8 | [
"MIT"
] | null | null | null | DROP TABLE IF EXISTS lectures CASCADE;
DROP TABLE IF EXISTS is_assigned CASCADE;
DROP TABLE IF EXISTS is_registered CASCADE;
DROP TABLE IF EXISTS participate CASCADE;
DROP TABLE IF EXISTS identifies CASCADE;
DROP TABLE IF EXISTS summarize CASCADE;
DROP TABLE IF EXISTS abstracts CASCADE;
DROP TABLE IF EXISTS TEAM CASCADE;
DROP TABLE IF EXISTS PROJECT CASCADE;
DROP TABLE IF EXISTS KEYWORD CASCADE;
DROP TABLE IF EXISTS SUBAREA CASCADE;
DROP TABLE IF EXISTS KNOWLEDGE_AREA CASCADE;
DROP TABLE IF EXISTS SEMESTER CASCADE;
DROP TABLE IF EXISTS SUBJECT CASCADE;
DROP TABLE IF EXISTS PROFESSOR CASCADE;
DROP TABLE IF EXISTS PHYSICAL_AGENT CASCADE;
DROP TABLE IF EXISTS JURIDICAL_AGENT CASCADE;
DROP TABLE IF EXISTS STUDENT CASCADE;
DROP TABLE IF EXISTS COMMON_USER CASCADE;
DROP TABLE IF EXISTS POST CASCADE;
DROP TABLE IF EXISTS FILE CASCADE;
DROP TYPE stats_post;
DROP TYPE stats_project;
DROP TYPE stats_semester;
DROP TYPE stats_semester_status;
| 35.074074 | 45 | 0.826822 |
25c0f376d9ae58354eed6e30a086ccc6edd4d77f | 821 | js | JavaScript | src/main.js | 1808vue/vue | 9969d3ca1f28a8525207dd2e2c126c988eef533a | [
"MIT"
] | null | null | null | src/main.js | 1808vue/vue | 9969d3ca1f28a8525207dd2e2c126c988eef533a | [
"MIT"
] | null | null | null | src/main.js | 1808vue/vue | 9969d3ca1f28a8525207dd2e2c126c988eef533a | [
"MIT"
] | null | null | null | import "./styls/reset.less"
import Vue from 'vue'
import App from './App'
import Axios from "axios"
// A component is a subclass of the Vue instance, so it inherits every property on Vue's prototype chain
Vue.prototype.$axios=Axios
Vue.config.productionTip = false
import router from './router'
// Using mint-ui
import MintUI from 'mint-ui'
import 'mint-ui/lib/style.css'
Vue.use(MintUI)
import { Upload} from 'element-ui';
Vue.component(Upload.name, Upload);
import 'element-ui/lib/theme-chalk/index.css';
import Vuex from 'vuex';
Vue.use(Vuex);
const store = new Vuex.Store({
state: {
count: 0,
name:'韩梅梅',
shuzu:[]
},
mutations: {
shuju(state,val){
state.shuzu=val;
}
}
})
// A component is a subclass of the Vue instance, so it inherits every property on Vue's prototype chain
/* eslint-disable no-new */
new Vue({
el: '#app',
router,
store,
  components: { App},// register the component
  template: '<App/>' // replace the root element with the component element
}) | 19.093023 | 46 | 0.666261 |
a14b21ba302052eadd6c32b54435059e3ccac4e3 | 3,050 | cpp | C++ | src/VkRenderer/MyShaderWriter.cpp | WubiCookie/VkRenderer | 87cc5d858591fc976c197ab2834e1ac9a418becd | [
"MIT"
] | 2 | 2020-05-31T19:54:19.000Z | 2021-09-14T12:00:12.000Z | src/VkRenderer/MyShaderWriter.cpp | WubiCookie/VkRenderer | 87cc5d858591fc976c197ab2834e1ac9a418becd | [
"MIT"
] | null | null | null | src/VkRenderer/MyShaderWriter.cpp | WubiCookie/VkRenderer | 87cc5d858591fc976c197ab2834e1ac9a418becd | [
"MIT"
] | null | null | null | #include "MyShaderWriter.hpp"
namespace cdm
{
Ubo::Ubo(VertexWriter& writer, std::string const& name, uint32_t bind,
uint32_t set, ast::type::MemoryLayout layout)
: sdw::Ubo(writer, name, bind, set)
{
VkDescriptorSetLayoutBinding b{};
b.binding = bind;
b.descriptorCount = 1;
b.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
b.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
writer.m_descriptors.push_back({ set, b });
}
Ubo::Ubo(FragmentWriter& writer, std::string const& name, uint32_t bind,
uint32_t set, ast::type::MemoryLayout layout)
: sdw::Ubo(writer, name, bind, set)
{
VkDescriptorSetLayoutBinding b{};
b.binding = bind;
b.descriptorCount = 1;
b.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
b.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
writer.m_descriptors.push_back({ set, b });
}
Ubo::Ubo(ComputeWriter& writer, std::string const& name, uint32_t bind,
uint32_t set, ast::type::MemoryLayout layout)
: sdw::Ubo(writer, name, bind, set)
{
VkDescriptorSetLayoutBinding b{};
b.binding = bind;
b.descriptorCount = 1;
b.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
b.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
writer.m_descriptors.push_back({ set, b });
}
UniqueShaderModule VertexWriter::createShaderModule(
const VulkanDevice& vk) const
{
std::vector<uint32_t> bytecode = spirv::serialiseSpirv(getShader());
vk::ShaderModuleCreateInfo createInfo;
createInfo.codeSize = bytecode.size() * sizeof(*bytecode.data());
createInfo.pCode = bytecode.data();
return vk.create(createInfo);
}
VertexShaderHelperResult VertexWriter::createHelperResult(
const VulkanDevice& vk) const
{
return { m_vertexInputHelper, m_descriptors, createShaderModule(vk) };
}
UniqueShaderModule FragmentWriter::createShaderModule(
const VulkanDevice& vk) const
{
std::vector<uint32_t> bytecode = spirv::serialiseSpirv(getShader());
vk::ShaderModuleCreateInfo createInfo;
createInfo.codeSize = bytecode.size() * sizeof(*bytecode.data());
createInfo.pCode = bytecode.data();
return vk.create(createInfo);
}
FragmentShaderHelperResult FragmentWriter::createHelperResult(
const VulkanDevice& vk) const
{
return { m_outputAttachments, m_descriptors, createShaderModule(vk) };
}
void ComputeWriter::addDescriptor(uint32_t binding, uint32_t set, VkDescriptorType type)
{
VkDescriptorSetLayoutBinding b{};
b.binding = binding;
b.descriptorCount = 1;
b.descriptorType = type;
b.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
m_descriptors.push_back({ set, b });
}
UniqueShaderModule ComputeWriter::createShaderModule(
const VulkanDevice& vk) const
{
std::vector<uint32_t> bytecode = spirv::serialiseSpirv(getShader());
vk::ShaderModuleCreateInfo createInfo;
createInfo.codeSize = bytecode.size() * sizeof(*bytecode.data());
createInfo.pCode = bytecode.data();
return vk.create(createInfo);
}
ComputeShaderHelperResult ComputeWriter::createHelperResult(
const VulkanDevice& vk) const
{
return { m_descriptors, createShaderModule(vk) };
}
} // namespace cdm
| 29.047619 | 88 | 0.757377 |
a190745f009fb9b6db0ebf8453959bbaf15261ac | 283 | ts | TypeScript | src/app/aio-chat/index.ts | gioragutt/angular-socketio-game | ae2fd6f1abd16bdc1786e35c6e674efe219ff035 | [
"MIT"
] | 1 | 2017-05-06T20:07:20.000Z | 2017-05-06T20:07:20.000Z | src/app/aio-chat/index.ts | gioragutt/angular-socketio-game | ae2fd6f1abd16bdc1786e35c6e674efe219ff035 | [
"MIT"
] | 1 | 2017-05-14T15:32:14.000Z | 2017-05-14T17:36:48.000Z | src/app/aio-chat/index.ts | gioragutt/angular-socketio-game | ae2fd6f1abd16bdc1786e35c6e674efe219ff035 | [
"MIT"
] | null | null | null | export { AioChatComponent } from './aio-chat/aio-chat.component';
export { MessageInputComponent } from './message-input/message-input.component';
export { MessagesListComponent } from './messages-list/messages-list.component';
export { AioChatModule } from './aio-chat.module';
| 56.6 | 81 | 0.75265 |
c952cfd9faa6ad1dd06dd669a095cd4a6e21b851 | 115 | ts | TypeScript | app/extend/context.ts | whxaxes/egg-plugin-ts-demo | 3965016ec5293ea221d93f095c2df4a374c0b9a1 | [
"MIT"
] | 9 | 2019-06-24T02:19:48.000Z | 2020-03-18T23:01:40.000Z | app/extend/context.ts | whxaxes/egg-plugin-ts-demo | 3965016ec5293ea221d93f095c2df4a374c0b9a1 | [
"MIT"
] | 1 | 2019-06-24T02:26:03.000Z | 2019-06-24T02:26:03.000Z | app/extend/context.ts | whxaxes/egg-plugin-ts-demo | 3965016ec5293ea221d93f095c2df4a374c0b9a1 | [
"MIT"
] | 1 | 2020-11-23T19:20:19.000Z | 2020-11-23T19:20:19.000Z | import { Context } from 'egg';
export default {
getEnv(this: Context) {
return this.app.config.env;
},
};
| 14.375 | 31 | 0.626087 |
404a4c5e5d205ac864690a0cce8ca1704bcf9de4 | 25,684 | ps1 | PowerShell | dashboard.ps1 | psu-libraries/contentdmtools | 3628caae62c367526f0f2e80948dc262632e6d4b | [
"Apache-2.0"
] | 4 | 2019-08-09T11:14:12.000Z | 2021-01-12T19:08:59.000Z | dashboard.ps1 | psu-libraries/contentdmtools | 3628caae62c367526f0f2e80948dc262632e6d4b | [
"Apache-2.0"
] | null | null | null | dashboard.ps1 | psu-libraries/contentdmtools | 3628caae62c367526f0f2e80948dc262632e6d4b | [
"Apache-2.0"
] | null | null | null | # dashboard.ps1
# Nathan Tallman, August 2019.
# CONTENTdm Tools Dashboard
# Variables
$scriptpath = $MyInvocation.MyCommand.Path
$cdmt_root = Split-Path $scriptpath
function Get-Org-Settings {
Write-Verbose "Get-Org-Settings checking for stored settings."
$Return = @{ }
if (Test-Path settings\org.csv) {
$orgcsv = $(Resolve-Path settings\org.csv)
$orgcsv = Import-Csv settings\org.csv
foreach ($org in $orgcsv) {
Write-Verbose ("Public URL: " + $org.public)
$Return.public = $org.public
Write-Verbose ("Server URL: " + $org.server)
$Return.server = $org.server
Write-Verbose ("License: " + $org.license)
$Return.license = $org.license
$Global:cdmt_public = $org.public
$Global:cdmt_server = $org.server
$Global:cdmt_license = $org.license
}
}
}
Get-Org-Settings
$HomePage = New-UDPage -Name "Home" -Content {
New-UDLayout -Columns 2 -Content {
New-UDCard -Title "Getting Started" -Content {
New-UDParagraph -Text "CONTENTdm Tools is a set of PowerShell scripts to assist in building and managing CONTENTdm digital collections. These command line scripts can be run using this web dashboard. Use the menu in the upper-left corner to begin using CONTENTdm Tools. Full documentation for the command line tools, which provides details on the processing, is available on the right."
}
New-UDCard -Title "Documentation" -Content {
New-UDHtml -Markup '<ul><li><a href="https://github.com/psu-libraries/contentdmtools/blob/community/docs/batchCreateCompoundObjects.md" target="_blank" alt="Documentation for Batch Create Compound Objects">Batch Create Compound Objects<a/></li><li><a href="https://github.com/psu-libraries/contentdmtools/blob/community/docs/batchEdit.md" target="_blank" alt="Documentation for Batch Edit Metadata">Batch Edit Metadata</a></li><li><a href="https://github.com/psu-libraries/contentdmtools/blob/community/docs/batchOCR.md" target="_blank" alt="Documentation for Batch Re-OCR">Batch Re-OCR</a></li></ul>'
}
New-UDCard -Title "Contributing" -Content {
New-UDParagraph -Text "CONTENTdm Tools is an open-source project. A link to the GitHub repository is available on the navigation menu. If you have PowerShell scripts you would like to contribute to the toolkit, please submit a pull request!"
}
New-UDCard -Title "Support" -Content {
New-UDParagraph -Text "CONTENTdm Tools was created by Nathan Tallman at Penn State University. Ability to provide support is limited, as is the ability to add new features, but some things may be possible. Contact Nathan at ntt7@psu.edu with questions, comments, and requests."
}
}
}
$Settings = New-UDPage -Name "Settings" -Content {
    # The app needs to be restarted after saving the org settings the first time for the -DefaultValue variable value to update. I think it's a caching thing in Universal-Dashboard. All org and user settings should be available immediately after saving regardless, and anything can be passed when starting batches anyway. User passwords always need to be stored as secure credentials.
New-UDLayout -Columns 2 -Content {
New-UDInput -Title "Organizational Settings" -Id "orgSettings" -SubmitText "Save" -Content {
New-UDInputField -Type 'textarea' -Name 'public' -Placeholder 'https://PublicURL.org' -DefaultValue $Global:cdmt_public
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'https://AdminURL.org' -DefaultValue $Global:cdmt_server
New-UDInputField -Type 'textbox' -Name 'license' -Placeholder 'XXXX-XXXX-XXXX-XXXX' -DefaultValue $Global:cdmt_license
} -Endpoint {
Param($public, $server, $license)
$org = New-Object -TypeName psobject
$org | Add-Member -MemberType NoteProperty -Name public -Value $public
$org | Add-Member -MemberType NoteProperty -Name server -Value $server
$org | Add-Member -MemberType NoteProperty -Name license -Value $license
$org | Export-Csv "$cdmt_root\settings\org.csv" -NoTypeInformation
$Global:cdmt_public = $public
$Global:cdmt_server = $server
$Global:cdmt_license = $license
New-UDInputAction -Content @(
New-UDCard -Title "Organizational Settings" -Text "Organizational Settings Saved`r`n------------------------------`r`nPublic: $Global:cdmt_public`r`nServer: $Global:cdmt_server`r`nLicense: $Global:cdmt_license"
)
}
New-UDInput -Title "User Settings" -Id "userSettings" -SubmitText "Save" -Content {
New-UDInputField -Type 'textbox' -Name 'user' -Placeholder 'CONTENTdm Username'
New-UDInputField -Type 'password' -Name 'password'
} -Endpoint {
Param($user, $password, $throttle, $staging)
# Still need to update batchEdit to use these settings and see if the password actually works!
$SecurePassword = $($password | ConvertTo-SecureString -AsPlainText -Force)
if (Test-Path settings\user.csv) {
$usrcsv = Import-Csv .\settings\user.csv
if ($usrcsv.user -eq "$user") {
$usrcsv = Import-Csv .\settings\user.csv
$usrcsv | Where-Object { $_.user -eq "$user" } | ForEach-Object {
$_.password = $SecurePassword | ConvertFrom-SecureString
}
$usrcsv | Export-Csv -Path .\settings\user.csv -NoTypeInformation
New-UDInputAction -Content @(
New-UDCard -Title "User Settings" -Text "Existing User Updated: $user`r`n$x"
)
}
else {
[pscustomobject]@{
user = "$user"
password = $SecurePassword | ConvertFrom-SecureString
} | Export-Csv -Path ".\settings\user.csv" -Append -NoTypeInformation
New-UDInputAction -Content @(
New-UDCard -Title "User Settings" -Text "New User Added: $user`r`n"
)
}
}
else {
[pscustomobject]@{
user = "$user"
password = $SecurePassword | ConvertFrom-SecureString
} | Export-Csv -Path ".\settings\user.csv" -Append -NoTypeInformation
New-UDInputAction -Content @(
New-UDCard -Title "User Settings" -Text "New User Saved: $user`r`n"
)
}
}
}
}
$Batch = New-UDPage -Name "Batches" -Content {
New-UDLayout -Columns 1 -Content {
New-UDInput -Title "Batch Create Items and Compound Objects" -Id "createBatch" -SubmitText "Start" -Content {
New-UDInputField -Type 'textarea' -Name 'path' -Placeholder 'C:\path\to\batch'
New-UDInputField -Type 'textbox' -Name 'metadata' -Placeholder 'metadata.csv' -DefaultValue "metadata.csv"
New-UDInputField -Type 'select' -Name 'throttle' -Placeholder "Throttle" -Values @("1", "2", "4", "6", "8") -DefaultValue "4"
New-UDInputField -Type 'select' -Name 'jp2' -Placeholder "JP2 Output" -Values @("true", "false", "skip") -DefaultValue "true"
New-UDInputField -Type 'select' -Name 'ocr' -Placeholder "OCR Output" -Values @("text", "pdf", "both", "extract", "skip") -DefaultValue "both"
#New-UDInputField -Type 'select' -Name 'ocrengine' -Placeholder "OCR Engine" -Values @("ABBYY", "tesseract") -DefaultValue "tesseract"
New-UDInputField -Type 'select' -Name 'originals' -Placeholder @("Originals") -Values @("keep", "discard", "skip") -DefaultValue "keep"
} -Endpoint {
Param($path, $metadata, [int16]$throttle, $jp2, $ocr, $originals)
$scriptblock = "$cdmt_root\batchCreate.ps1 -path $path -metadata $metadata -throttle $throttle -jp2 $jp2 -ocr $ocr -originals $originals"
Start-Process PowerShell.exe -ArgumentList "-NoExit -WindowStyle Maximized -ExecutionPolicy ByPass -Command $scriptblock"
New-UDInputAction -Content @(
New-UDCard -Title "Batch Create Items and Compound Objects" -Text "`nBatch creation has started in a new PowerShell window, you should see running output there. When it's complete, a brief report that includes the path to a log file containing the all output will be shown and you can close the window.`r`n
You can also close the window at any time to halt the batch.`n
------------------------------`n
Path:`t$path`n
Metadata:`t$metadata`n
Throttle:`t$throttle`n
JP2s:`t$jp2`n
OCR:`t$ocr`n
Originals:`t$originals`n
------------------------------`n
Batch Start Time:`t$(Get-Date -Format u)"
)
}
New-UDInput -Title "Batch Edit Metadata" -Id "batchEdit" -SubmitText "Start" -Content {
New-UDInputField -Type 'textbox' -Name 'collection' -Placeholder 'Collection Alias'
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'URL for Admin UI' -DefaultValue $Global:cdmt_server
New-UDInputField -Type 'textarea' -Name 'license' -Placeholder 'XXXX-XXXX-XXXX-XXXX' -DefaultValue $Global:cdmt_license
New-UDInputField -Type 'textarea' -Name 'metadata' -Placeholder 'C:\path\to\metadata.csv'
New-UDInputField -Type 'textbox' -Name 'user' -Placeholder 'CONTENTdm Username'
} -Endpoint {
Param($collection, $server, $license, $metadata, $user)
$scriptblock = "$cdmt_root\batchEdit.ps1 -collection $collection -server $server -license $license -csv $metadata -user $user"
Start-Process PowerShell.exe -ArgumentList "-NoExit -WindowStyle Maximized -ExecutionPolicy ByPass -Command $scriptblock"
New-UDInputAction -Content @(
New-UDCard -Title "Batch Edit Metadata" -Text "`rBatch edit has started in a new PowerShell window, you should see running output there. When it's complete, a brief report that includes the path to a log file containing the all output will be shown and you can close the window.`r`n
You can also close the window at any time to halt the batch.`n
------------------------------`n
Collection:`t$collection`n
Server:`t$server`n
License:`t$license`n
Metadata:`t$metadata`n`
User:`t$user`n
------------------------------`n
Batch Start Time`t$(Get-Date -Format u)"
)
}
New-UDInput -Title "Batch OCR a Collection" -Id "batchOCR" -SubmitText "Start" -Content {
New-UDInputField -Type 'textbox' -Name 'collection' -Placeholder 'Collection Alias'
New-UDInputField -Type 'textbox' -Name 'field' -Placeholder 'Fulltext Field'
New-UDInputField -Type 'textarea' -Name 'public' -Placeholder 'URL for Public UI' -DefaultValue $Global:cdmt_public
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'URL for Admin UI' -DefaultValue $Global:cdmt_server
New-UDInputField -Type 'textarea' -Name 'license' -Placeholder 'XXXX-XXXX-XXXX-XXXX' -DefaultValue $Global:cdmt_license
New-UDInputField -Type 'textarea' -Name 'path' -Placeholder 'C:\path\to\staging'
New-UDInputField -Type 'textbox' -Name 'user' -Placeholder 'CONTENTdm Username'
New-UDInputField -Type 'select' -Name 'throttle' -Placeholder "Throttle" -Values @("1", "2", "4", "6", "8") -DefaultValue "4"
New-UDInputField -Type 'select' -Name 'method' -Placeholder "Download Method" -Values @("API", "IIIF") -DefaultValue "API"
} -Endpoint {
Param($collection, $field, $public, $server, $license, $path, $user, $throttle, $method)
$scriptblock = "$cdmt_root\batchOCR.ps1 -collection $collection -field $field -public $public -server $server -license $license -path $path -user $user -throttle $throttle -method $method"
Start-Process PowerShell.exe -ArgumentList "-NoExit -WindowStyle Maximized -ExecutionPolicy ByPass -Command $scriptblock"
New-UDInputAction -Content @(
New-UDCard -Title "Batch OCR a Collection" -Text "`nBatch OCR has started in a new PowerShell window, you should see running output there. When it's complete, a brief report that includes the path to a log file containing the all output will be shown and you can close the window.`r`n
You can also close the window at any time to halt the batch.`n
------------------------------`n
Collection:`t`t$collection`n
Field:`t`t$field`n
Public:`t`t$public`n
Server:`t`t$server`n
License:`t`t$license`n
Path:`t`t$path`n
User:`t`t$user`n
Throttle:`t`t$throttle`n
Method:`t`t$method`n
------------------------------`n
Batch Start Time:`t`t$(Get-Date -Format u)"
)
}
}
New-UDLayout -Columns 3 -Content {
New-UDInput -Title "Export Collection Metadata" -Id "exportCollectionMetadata" -SubmitText "Export" -Content {
New-UDInputField -Type 'textbox' -Name 'collection' -Placeholder 'Collection Alias'
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'URL for Admin UI' -DefaultValue $Global:cdmt_server
New-UDInputField -Type 'textarea' -Name 'path' -Placeholder 'C:\path\to\staging'
New-UDInputField -Type 'textbox' -Name 'user' -Placeholder 'CONTENTdm Username'
} -Endpoint {
Param($user, $server, $collection, $path)
Write-Debug "Test for existing user credentials; if they exist use the, if they don't prompt for a password. "
if (Test-Path $cdmt_root\settings\user.csv) {
$usrcsv = $(Resolve-Path $cdmt_root\settings\user.csv)
$usrcsv = Import-Csv $usrcsv
$usrcsv | Where-Object { $_.user -eq "$user" } | ForEach-Object {
$SecurePassword = $_.password | ConvertTo-SecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR($SecurePassword)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
if ("$user" -notin $usrcsv.user) {
Write-Output "No user settings found for $user. Enter a password below or store secure credentials using the dashboard."
[SecureString]$password = Read-Host "Enter $user's CONTENTdm password" -AsSecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR([SecureString]$password)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
}
Else {
Write-Output "No user settings file found. Enter a password below or store secure credentials using the dashboard."
[SecureString]$password = Read-Host "Enter $user's CONTENTdm password" -AsSecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR([SecureString]$password)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
$pair = "$($user):$($pw)"
$encodedCreds = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes($pair))
$basicAuthValue = "Basic $encodedCreds"
$Headers = @{
Authorization = $basicAuthValue
}
Invoke-WebRequest "$server/cgi-bin/admin/export.exe?CISODB=/$collection&CISOOP=ascii&CISOMODE=1&CISOPTRLIST=" -Headers $Headers | Out-Null
Invoke-RestMethod "$server/cgi-bin/admin/getfile.exe?CISOMODE=1&CISOFILE=/$collection/index/description/export.txt" -Headers $Headers -OutFile "$path\$collection.txt"
New-UDInputAction -Toast "Collection metadata exported to $path\$collection.txt." -Duration 5000
}
New-UDInput -Title "Unlock Collection Metadata" -Id "unlockCollectionMetadata" -SubmitText "Unlock" -Content {
New-UDInputField -Type 'textbox' -Name 'collection' -Placeholder 'Collection Alias'
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'URL for Admin UI' -DefaultValue $Global:cdmt_server
New-UDInputField -Type 'textbox' -Name 'user' -Placeholder 'CONTENTdm Username'
} -Endpoint {
Param($user, $server, $collection)
Write-Debug "Test for existing user credentials; if they exist use the, if they don't prompt for a password. "
if (Test-Path $cdmt_root\settings\user.csv) {
$usrcsv = $(Resolve-Path $cdmt_root\settings\user.csv)
$usrcsv = Import-Csv $usrcsv
$usrcsv | Where-Object { $_.user -eq "$user" } | ForEach-Object {
$SecurePassword = $_.password | ConvertTo-SecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR($SecurePassword)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
if ("$user" -notin $usrcsv.user) {
Write-Output "No user settings found for $user. Enter a password below or store secure credentials using the dashboard."
[SecureString]$password = Read-Host "Enter $user's CONTENTdm password" -AsSecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR([SecureString]$password)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
}
Else {
Write-Output "No user settings file found. Enter a password below or store secure credentials using the dashboard."
[SecureString]$password = Read-Host "Enter $user's CONTENTdm password" -AsSecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR([SecureString]$password)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
$pair = "$($user):$($pw)"
$encodedCreds = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes($pair))
$basicAuthValue = "Basic $encodedCreds"
$Headers = @{
Authorization = $basicAuthValue
}
Invoke-WebRequest "$server/cgi-bin/admin/unlocksubset.exe?CISODB=/$collection&CISOPTRLIST=all&CISOMODE=1" -Headers $Headers | Out-Null
New-UDInputAction -Toast "$collection metadata unlocked." -Duration 5000
}
New-UDInput -Title "Index Collection Metadata" -Id "indexCollectionMetadata" -SubmitText "Index" -Content {
New-UDInputField -Type 'textbox' -Name 'collection' -Placeholder 'Collection Alias'
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'URL for Admin UI' -DefaultValue $Global:cdmt_server
New-UDInputField -Type 'textbox' -Name 'user' -Placeholder 'CONTENTdm Username'
} -Endpoint {
Param($user, $server, $collection)
Write-Debug "Test for existing user credentials; if they exist use the, if they don't prompt for a password. "
if (Test-Path $cdmt_root\settings\user.csv) {
$usrcsv = $(Resolve-Path $cdmt_root\settings\user.csv)
$usrcsv = Import-Csv $usrcsv
$usrcsv | Where-Object { $_.user -eq "$user" } | ForEach-Object {
$SecurePassword = $_.password | ConvertTo-SecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR($SecurePassword)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
if ("$user" -notin $usrcsv.user) {
Write-Output "No user settings found for $user. Enter a password below or store secure credentials using the dashboard."
[SecureString]$password = Read-Host "Enter $user's CONTENTdm password" -AsSecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR([SecureString]$password)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
}
Else {
Write-Output "No user settings file found. Enter a password below or store secure credentials using the dashboard."
[SecureString]$password = Read-Host "Enter $user's CONTENTdm password" -AsSecureString
$BSTR = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR([SecureString]$password)
$pw = [System.Runtime.InteropServices.Marshal]::PtrToStringAuto($BSTR)
$null = $BSTR
}
$pair = "$($user):$($pw)"
$encodedCreds = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes($pair))
$basicAuthValue = "Basic $encodedCreds"
$Headers = @{
Authorization = $basicAuthValue
}
Invoke-WebRequest "$server/cgi-bin/admin/putsched.exe?CISODB=/$collection&CISOOP=index&CISOTYPE1=now" -Headers $Headers | Out-Null
New-UDInputAction -Toast "$collection is currently indexing." -Duration 5000
New-UDLink -Text "CONTENTdm Admin UI" -Url "$server/cgi-bin/admin/bld.exe?CISODB=/$collection"
}
}
New-UDLayout -Columns 2 -Content {
New-UDInput -Title "Collection Alias Look Up" -Id "getCollections" -SubmitText "Look Up" -Content {
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'URL for Admin UI' -DefaultValue $Global:cdmt_server
} -Endpoint {
Param($server)
$data = Invoke-RestMethod "$server/dmwebservices/index.php?q=dmGetCollectionList/json"
New-UDInputAction -Content @(
New-UDGrid -Title "Published Collection Alias'" -Headers @("Name", "Alias") -Properties @("name", "secondary_alias") -Endpoint { $data | Out-UDGridData }
)
}
New-UDInput -Title "Collection Field Properties Look Up" -Id "getCollProp" -SubmitText "Look Up" -Content {
New-UDInputField -Type 'textbox' -Name 'collection' -Placeholder 'Collection Alias'
New-UDInputField -Type 'textarea' -Name 'server' -Placeholder 'URL for Admin UI' -DefaultValue $Global:cdmt_server
} -Endpoint {
Param($collection, $server)
$data = Invoke-RestMethod "$server/dmwebservices/index.php?q=dmGetCollectionFieldInfo/$collection/json"
New-UDInputAction -Content @(
New-UDGrid -Title "Collection Field Properties: $collection" -Headers @("Name", "Nickname", "Data Type", "Large", "Searchable", "Hidden", "Admin", "Required", "Controlled Vocab") -Properties @("name", "nick", "type", "size", "search", "hide", "admin", "req", "vocab") -Endpoint { $data | Out-UDGridData }
)
}
}
}
$NavBarLinks = @(
(New-UDLink -Text "Home" -Url "/Home" -Icon home),
(New-UDLink -Text "Settings" -Url "/Settings" -Icon sliders_h),
(New-UDLink -Text "Batches" -Url "/Batches" -Icon play),
(New-UDLink -Text "Documentation" -Url "https://github.com/psu-libraries/contentdmtools/tree/community/docs" -Icon book))
$Navigation = New-UDSideNav -Content {
New-UDSideNavItem -Text "Home" -PageName "Home" -Icon home
New-UDSideNavItem -Text "Settings" -PageName "Settings" -Icon sliders_h
New-UDSideNavItem -Text "Batches" -PageName "Batches" -Icon play
New-UDSideNavItem -Text "Documentation" -Children {
New-UDSideNavItem -Text "Batch Create" -Url 'https://github.com/psu-libraries/contentdmtools/blob/community/docs/batchCreate.md' -Icon plus_square
New-UDSideNavItem -Text "Batch Edit" -Url 'https://github.com/psu-libraries/contentdmtools/blob/community/docs/batchEdit.md' -Icon edit
New-UDSideNavItem -Text "Batch OCR" -Url 'https://github.com/psu-libraries/contentdmtools/blob/community/docs/batchOCR.md' -Icon font
}
}
Enable-UDLogging -Level Info -FilePath "$cdmt_root\logs\dashboard_log.txt" #-Console
$theme = New-UDTheme -Name "cdm-tools" -Definition @{
'::placeholder' = @{
color = 'black'
}
'textarea.materialize-textarea' = @{
'height' = 'auto !important'
'overflow-y' = 'hidden !important'
'resize' = 'none !important'
}
} -Parent "Default"
Start-UDDashboard -Content {
New-UDDashboard -Title "CONTENTdm Tools Dashboard" -Navigation $Navigation -NavbarLinks $NavBarLinks -Theme $theme -Pages @($HomePage, $Batch, $Settings)
} -Port 1000 -Name 'cdm-tools' #-AutoReload | 66.53886 | 613 | 0.617661 |
7469a7e3352ec5bbfb5d9d23449d82b3f168a148 | 2,022 | css | CSS | public/css/login.css | cheansiven/trivia-track | 7650f9d13f860de0c5e90f2e3f5d5fcea8d54cfb | [
"MIT"
] | null | null | null | public/css/login.css | cheansiven/trivia-track | 7650f9d13f860de0c5e90f2e3f5d5fcea8d54cfb | [
"MIT"
] | null | null | null | public/css/login.css | cheansiven/trivia-track | 7650f9d13f860de0c5e90f2e3f5d5fcea8d54cfb | [
"MIT"
] | null | null | null | body {
background: #2ecc71 url("http://38.media.tumblr.com/d23deac40b06633b79520a8552f40b94/tumblr_nb1uhrRrge1st5lhmo1_1280.jpg") no-repeat center center fixed;
-webkit-background-size: cover;
-moz-background-size: cover;
-o-background-size: cover;
background-size: cover;
font-family: "Roboto";
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
body::before {
z-index: -1;
content: '';
position: fixed;
top: 0;
left: 0;
background: #099099;
/* IE Fallback */
background: rgba(9, 144, 153, 0.8);
width: 100%;
height: 100%;
}
.form {
position: absolute;
top: 50%;
left: 50%;
background: #fff;
width: 285px;
margin: -140px 0 0 -182px;
padding: 40px;
box-shadow: 0 0 3px rgba(0, 0, 0, 0.3);
}
.form h2 {
margin: 0 0 20px;
line-height: 1;
color: #099099;
font-size: 22px;
font-weight: 400;
text-align: center;
text-transform: uppercase;
}
.form input {
outline: none;
display: block;
width: 100%;
margin: 0 0 20px;
padding: 10px 15px;
border: 1px solid #ccc;
color: #ccc;
font-family: "Roboto";
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
font-size: 14px;
  font-weight: 400;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
-webkit-transition: 0.2s linear;
-moz-transition: 0.2s linear;
-ms-transition: 0.2s linear;
-o-transition: 0.2s linear;
transition: 0.2s linear;
}
.form input:focus {
color: #333;
border: 1px solid #099099;
}
.form input[type="submit"] {
cursor: pointer;
background: #099099;
width: 100%;
padding: 10px 15px;
border: 0;
color: #fff;
font-family: "Roboto";
font-size: 14px;
font-weight: 400;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
-webkit-transition: 0.2s linear;
-moz-transition: 0.2s linear;
-ms-transition: 0.2s linear;
-o-transition: 0.2s linear;
transition: 0.2s linear;
}
.form input[type="submit"]:hover {
background: #097c84;
}
| 22.719101 | 155 | 0.667161 |
5ae11a799c495bdb135f12cf80ee513c6bbd408d | 1,399 | lua | Lua | ld48/app/window.lua | aglitchman/defold-ld48-game | 04844f623adcad675307566b0256d0a0a6145942 | [
"Apache-2.0"
] | 7 | 2021-04-27T12:52:16.000Z | 2021-08-08T19:30:10.000Z | ld48/app/window.lua | aglitchman/defold-ld48-game | 04844f623adcad675307566b0256d0a0a6145942 | [
"Apache-2.0"
] | null | null | null | ld48/app/window.lua | aglitchman/defold-ld48-game | 04844f623adcad675307566b0256d0a0a6145942 | [
"Apache-2.0"
] | null | null | null | local M = {}
function M.resize(game_width, game_height)
if sys.get_sys_info().system_name == "Windows" then
local displays = defos.get_displays()
local current_display_id = defos.get_current_display_id()
local screen_width = displays[current_display_id].bounds.width
local screen_height = displays[current_display_id].bounds.height
game_width = game_width or 2500
game_height = game_height or 1400
local factor = 0.5
if tonumber(sys.get_config("display.high_dpi", 0)) == 1 then
factor = 1
end
local x, y, w, h = defos.get_view_size()
w = game_width * factor
h = game_height * factor
while screen_height * 0.9 <= h do
w = w / 1.25
h = h / 1.25
end
defos.set_view_size(x, y, w, h)
end
end
function M.center()
if sys.get_sys_info().system_name == "Windows" then
local displays = defos.get_displays()
local current_display_id = defos.get_current_display_id()
local screen_width = displays[current_display_id].bounds.width
local screen_height = displays[current_display_id].bounds.height
local x, y, w, h = defos.get_window_size()
x = math.floor((screen_width - w) / 2)
y = math.floor((screen_height - h) / 2)
defos.set_window_size(x, y, w, h)
end
end
return M
| 31.795455 | 72 | 0.626876 |
7f7ee248b1488b30e1cba6e66066811cce49ba75 | 3,119 | rs | Rust | storage/src/utils.rs | changweige/image-service | ba35a388fd12a9c833d20b48a591d029b6af10b5 | [
"Apache-2.0"
] | 1 | 2021-03-29T12:30:31.000Z | 2021-03-29T12:30:31.000Z | storage/src/utils.rs | changweige/image-service | ba35a388fd12a9c833d20b48a591d029b6af10b5 | [
"Apache-2.0"
] | null | null | null | storage/src/utils.rs | changweige/image-service | ba35a388fd12a9c833d20b48a591d029b6af10b5 | [
"Apache-2.0"
] | null | null | null | // Copyright 2020 Ant Group. All rights reserved.
//
// SPDX-License-Identifier: Apache-2.0
use std::io::{ErrorKind, Result};
use std::os::unix::io::RawFd;
use std::slice::from_raw_parts_mut;
use libc::off64_t;
use nix::sys::uio::{preadv, IoVec};
use vm_memory::{Bytes, VolatileSlice};
use nydus_utils::{
digest::{self, RafsDigest},
round_down_4k,
};
pub fn readv(fd: RawFd, bufs: &[VolatileSlice], offset: u64, max_size: usize) -> Result<usize> {
if bufs.is_empty() {
return Ok(0);
}
let mut size: usize = 0;
let mut iovecs: Vec<IoVec<&mut [u8]>> = Vec::new();
for buf in bufs {
let mut exceed = false;
let len = if size + buf.len() > max_size {
exceed = true;
max_size - size
} else {
buf.len()
};
size += len;
let iov = IoVec::from_mut_slice(unsafe { from_raw_parts_mut(buf.as_ptr(), len) });
iovecs.push(iov);
if exceed {
break;
}
}
loop {
let ret = preadv(fd, &iovecs, offset as off64_t).map_err(|_| last_error!());
match ret {
Ok(ret) => {
return Ok(ret);
}
Err(err) => {
// Retry if the IO is interrupted by signal.
if err.kind() != ErrorKind::Interrupted {
return Err(err);
}
}
}
}
}
pub fn copyv(src: &[u8], dst: &[VolatileSlice], offset: u64, mut max_size: usize) -> Result<usize> {
let mut offset = offset as usize;
let mut size: usize = 0;
if max_size > src.len() {
max_size = src.len()
}
for s in dst.iter() {
if offset >= src.len() || size >= src.len() {
break;
}
let mut len = max_size - size;
if offset + len > src.len() {
len = src.len() - offset;
}
if len > s.len() {
len = s.len();
}
s.write_slice(&src[offset..offset + len], 0)
.map_err(|e| einval!(e))?;
offset += len;
size += len;
}
Ok(size)
}
/// A customized readahead function to ask kernel to fault in all pages from offset to end.
///
/// Call libc::readahead on every 128KB range because otherwise readahead stops at kernel bdi
/// readahead size which is 128KB by default.
pub fn readahead(fd: libc::c_int, mut offset: u64, end: u64) {
let mut count;
offset = round_down_4k(offset);
loop {
if offset >= end {
break;
}
// Kernel default 128KB readahead size
count = std::cmp::min(128 << 10, end - offset);
unsafe { libc::readahead(fd, offset as i64, count as usize) };
offset += count;
}
}
/// A customized buf allocator that avoids zeroing
pub fn alloc_buf(size: usize) -> Vec<u8> {
let mut buf = Vec::with_capacity(size);
unsafe { buf.set_len(size) };
buf
}
/// Check hash of data matches provided one
pub fn digest_check(data: &[u8], digest: &RafsDigest, digester: digest::Algorithm) -> bool {
digest == &RafsDigest::from_buf(data, digester)
}
| 27.121739 | 100 | 0.543764 |
9ffe2c4da4e9b83401494fcd380eacbfc844a462 | 721 | py | Python | resources/register/swagger_doc.py | Yogev911/sms-service | 18e5c6ba55d354ea59bc0eceed31a0e01ecb5ebb | [
"MIT"
] | null | null | null | resources/register/swagger_doc.py | Yogev911/sms-service | 18e5c6ba55d354ea59bc0eceed31a0e01ecb5ebb | [
"MIT"
] | null | null | null | resources/register/swagger_doc.py | Yogev911/sms-service | 18e5c6ba55d354ea59bc0eceed31a0e01ecb5ebb | [
"MIT"
] | null | null | null | from resources.common_modles import UserModel
register_post = {
'tags': ['register'],
'description': 'register new user',
'parameters': [
{
'name': 'user',
'description': 'Request users credentials',
'in': 'body',
'schema': UserModel,
'required': True
}
],
'responses': {
'201': {
'description': 'User register successfully',
},
'401': {
'description': 'User already registered'
},
'406': {
'description': 'Phone number or other params are incorrect'
},
'501': {
'description': 'Internal server error'
}
}
}
| 24.033333 | 71 | 0.474341 |
07d80926e716495eb42cc264e3ee393a3d616fa4 | 151 | css | CSS | css/newstyle.css | alanxoc3/pico8-api | 7a63ee68bb0c4c06f3be497e540fa58db30bf15c | [
"CC-BY-4.0"
] | 17 | 2020-05-21T01:12:19.000Z | 2022-01-26T16:21:06.000Z | css/newstyle.css | alanxoc3/pico8-api | 7a63ee68bb0c4c06f3be497e540fa58db30bf15c | [
"CC-BY-4.0"
] | 1 | 2020-06-10T10:10:12.000Z | 2020-06-10T10:10:12.000Z | css/newstyle.css | alanxoc3/pico8-api | 7a63ee68bb0c4c06f3be497e540fa58db30bf15c | [
"CC-BY-4.0"
] | 2 | 2021-01-13T03:51:01.000Z | 2021-02-11T15:20:52.000Z | .izoom img:hover {
transform: scaleX(-1);
-webkit-transform: scaleX(-1);
}
.imgrow {
display: flex;
}
.imgcol {
flex: 50%;
padding: 5px;
}
| 10.785714 | 32 | 0.596026 |
a06455f5036e4e20a5090b6b1cc91c973498d620 | 1,713 | kt | Kotlin | app/src/main/java/com/covidapp/db/LocalCacheManager.kt | abhi1904/CovidApp | f0902aa10a48f79938019ca7b010e4260469e318 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/covidapp/db/LocalCacheManager.kt | abhi1904/CovidApp | f0902aa10a48f79938019ca7b010e4260469e318 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/covidapp/db/LocalCacheManager.kt | abhi1904/CovidApp | f0902aa10a48f79938019ca7b010e4260469e318 | [
"Apache-2.0"
] | null | null | null | package com.covidapp.db
import android.annotation.SuppressLint
import android.content.Context
import androidx.room.Room
import com.covidapp.CovidApplication
import com.covidapp.di.APIComponent
import com.covidapp.services.model.Country
import com.covidapp.util.Constants.Companion.DB_NAME
import io.reactivex.Completable
import io.reactivex.CompletableObserver
import io.reactivex.android.schedulers.AndroidSchedulers
import io.reactivex.disposables.Disposable
import io.reactivex.schedulers.Schedulers
import retrofit2.Response
class LocalCacheManager(context: Context) {
private val countryDBDao: CountryDBDao =
Room.databaseBuilder(context, AppDatabase::class.java, DB_NAME)
.allowMainThreadQueries()
.build().countryDBDao
fun saveCountries(countries: List<Country>, responseCallback: ResponseCallback) {
Completable.fromAction { countryDBDao.saveCountries(countries) }
.observeOn(AndroidSchedulers.mainThread())
.subscribeOn(Schedulers.io()).subscribe(object : CompletableObserver {
override fun onSubscribe(d: Disposable) {}
override fun onComplete() {
responseCallback.onSaveCountries()
}
override fun onError(e: Throwable) {
responseCallback.onError()
}
})
}
@SuppressLint("CheckResult")
fun getCountries(responseCallback: ResponseCallback) {
countryDBDao.getCountries().subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe { countries ->
responseCallback.onGetCountry(countries)
}
}
}
| 32.942308 | 85 | 0.692353 |
355d4d7c29f6a165699efb300750a17e0c77ac6a | 2,190 | dart | Dart | lib/txns/screen/txns_choose_account.dart | LeastAuthority/Mina-StakingPower-Wallet | 941b04458d6c3a988d988e63b2c4739a6cedb179 | [
"MIT"
] | 36 | 2021-03-04T13:42:00.000Z | 2022-02-22T08:44:18.000Z | lib/txns/screen/txns_choose_account.dart | LeastAuthority/Mina-StakingPower-Wallet | 941b04458d6c3a988d988e63b2c4739a6cedb179 | [
"MIT"
] | null | null | null | lib/txns/screen/txns_choose_account.dart | LeastAuthority/Mina-StakingPower-Wallet | 941b04458d6c3a988d988e63b2c4739a6cedb179 | [
"MIT"
] | 10 | 2021-03-10T03:59:00.000Z | 2021-12-28T19:52:40.000Z | import 'package:coda_wallet/event_bus/event_bus.dart';
import 'package:coda_wallet/widget/account/account_list.dart';
import 'package:coda_wallet/widget/app_bar/app_bar.dart';
import 'package:coda_wallet/widget/ui/custom_gradient.dart';
import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
class TxnsChooseAccountScreen extends StatefulWidget {
TxnsChooseAccountScreen({Key? key}) : super(key: key);
@override
_TxnsChooseAccountScreenState createState() => _TxnsChooseAccountScreenState();
}
class _TxnsChooseAccountScreenState extends State<TxnsChooseAccountScreen> {
@override
void initState() {
super.initState();
}
@override
void dispose() {
super.dispose();
}
@override
Widget build(BuildContext context) {
print('TxnsChooseAccountScreen: build(context: $context)');
return Scaffold(
resizeToAvoidBottomInset: false,
backgroundColor: Colors.white,
appBar: buildNoTitleAppBar(context, leading: false),
body: Container(
child: _buildSendFromBody(context),
decoration: BoxDecoration(
gradient: backgroundGradient
),
)
);
}
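  // Builds the screen body: title, section label and the tappable account list.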
_buildSendFromBody(BuildContext context) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Container(height: 30.h),
Padding(
padding: EdgeInsets.only(left: 29.w, right: 29.w),
child: Text('Transactions', textAlign: TextAlign.left,
style: TextStyle(fontSize: 28.sp, color: Color(0xff2d2d2d), fontWeight: FontWeight.w500)),
),
Container(height: 28.h),
Padding(
padding: EdgeInsets.only(left: 29.w, right: 29.w),
child: Text('SELECT AN ACCOUNT', textAlign: TextAlign.left,
style: TextStyle(fontSize: 13.sp, color: Color(0xff2d2d2d), fontWeight: FontWeight.w500)),
),
Container(height: 10.h),
Expanded(
flex: 1,
child: buildAccountList(
(index) {
eventBus.fire(ChooseAccountTxns(index));
Navigator.of(context).pop();
}
)
)
],
);
}
}
| 30 | 102 | 0.654795 |
ef6701875e3bfef6960c68c7ab172808f5624875 | 1,223 | js | JavaScript | scripts/app/home/HomeCtrl.js | spaceshipsamurai/samurai-home | 0222034b07bfc6b85111336401ce916ddf4977f9 | [
"Apache-2.0"
] | null | null | null | scripts/app/home/HomeCtrl.js | spaceshipsamurai/samurai-home | 0222034b07bfc6b85111336401ce916ddf4977f9 | [
"Apache-2.0"
] | null | null | null | scripts/app/home/HomeCtrl.js | spaceshipsamurai/samurai-home | 0222034b07bfc6b85111336401ce916ddf4977f9 | [
"Apache-2.0"
] | null | null | null | angular.module('ssHome').controller('HomeCtrl', function($scope, ssYoutubeFeed) {
ssYoutubeFeed.getRecent(3).then(function(videos) {
$scope.videos = videos;
});
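    // Leadership contacts shown on the home page.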
$scope.contacts = [
{
id: 1694010657,
name: 'Securitas Protector',
title: 'Alliance Exec.'
},
{
id: 92988604,
name: 'Aben Sokoln',
title: '2nd in command'
},
{
id: 92940230,
name: 'Xeron Vann',
title: 'Diplomacy Director'
},
{
id: 92507729,
name: 'Dex Thiesant',
title: 'IT Director'
}
];
$scope.corporations = [
{
id: 98041262,
name: 'Community against Justice'
},
{
id: 851230678,
name: 'The New Era'
},
{
id: 555073675,
name: 'The Green Cross'
},
{
id: 692190945,
name: 'Shinigami Miners'
},
{
id: 98097817,
name: 'Concordiat'
},
{
id: 98189045,
name: 'Pax Capital'
}
];
});
| 21.086207 | 81 | 0.404742 |
e83f3e603553843781e6b6f9e4310b5b6f0162fd | 809 | cs | C# | Source/Scripting/bsfSharp/Serialization/HideInInspector.cs | bsf2dev/bsf | b318cd4eb1b0299773d625e6c870b8d503cf539e | [
"MIT"
] | 1,745 | 2018-03-16T02:10:28.000Z | 2022-03-26T17:34:21.000Z | Source/Scripting/bsfSharp/Serialization/HideInInspector.cs | bsf2dev/bsf | b318cd4eb1b0299773d625e6c870b8d503cf539e | [
"MIT"
] | 395 | 2018-03-16T10:18:20.000Z | 2021-08-04T16:52:08.000Z | Source/Scripting/bsfSharp/Serialization/HideInInspector.cs | bsf2dev/bsf | b318cd4eb1b0299773d625e6c870b8d503cf539e | [
"MIT"
] | 267 | 2018-03-17T19:32:54.000Z | 2022-02-17T16:55:50.000Z | //********************************* bs::framework - Copyright 2018-2019 Marko Pintera ************************************//
//*********** Licensed under the MIT license. See LICENSE.md for full terms. This notice is not to be removed. ***********//
using System;
namespace bs
{
/** @addtogroup Serialization
* @{
*/
/// <summary>
/// Attribute that prevents a field or a property from being visible in the inspector window in editor. Normally those
/// fields are public fields of a <see cref="Component"/>, <see cref="Resource"/> or a class marked with a
/// <see cref="SerializeObject"/> attribute.
/// </summary>
[AttributeUsage(AttributeTargets.Field | AttributeTargets.Property)]
public sealed class HideInInspector : Attribute
{
}
/** @} */
}
| 35.173913 | 125 | 0.587145 |
c382672f64a157192b6624ef01d2de67c8479339 | 1,442 | sql | SQL | JavaScript/ajax-examples/user.sql | michaeltharper/codelab | 572e3636897be3cfe63ee7e8b658641fe6cddb46 | [
"Apache-2.0"
] | null | null | null | JavaScript/ajax-examples/user.sql | michaeltharper/codelab | 572e3636897be3cfe63ee7e8b658641fe6cddb46 | [
"Apache-2.0"
] | null | null | null | JavaScript/ajax-examples/user.sql | michaeltharper/codelab | 572e3636897be3cfe63ee7e8b658641fe6cddb46 | [
"Apache-2.0"
] | null | null | null | -- phpMyAdmin SQL Dump
-- version 3.5.8
-- http://www.phpmyadmin.net
--
-- Host: localhost:3306
-- Generation Time: May 26, 2013 at 02:04 PM
-- Server version: 5.5.30
-- PHP Version: 5.4.14
SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Database: `ajaxtest`
--
-- --------------------------------------------------------
--
-- Table structure for table `user`
--
CREATE TABLE IF NOT EXISTS `user` (
`id` int(5) NOT NULL AUTO_INCREMENT,
`FirstName` char(40) NOT NULL,
`LastName` char(50) NOT NULL,
`Age` int(3) NOT NULL,
`Hometown` char(40) NOT NULL,
`Job` char(50) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 AUTO_INCREMENT=5 ;
--
-- Dumping data for table `user`
--
INSERT INTO `user` (`id`, `FirstName`, `LastName`, `Age`, `Hometown`, `Job`) VALUES
(1, 'Peter', 'Griffin', 42, 'Quahog', 'Brewery'),
(2, 'Lois', 'Griffin', 40, 'Newport', 'Teacher'),
(3, 'Joseph', 'Swanson', 30, 'Quahog', 'Police'),
(4, 'Glenn', 'Quagmire', 35, 'Quahog', 'Pilot');
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 27.730769 | 83 | 0.651872 |
07e99f3353f3ef9d54716d95befcbe21841aed95 | 2,096 | css | CSS | Aplikasi/cihanjuang/assets/css/print-css/960.css | k-sandi/pkm-poltekpos | d768057a394cf11f1f424efd144a87f243aa2e97 | [
"Apache-2.0"
] | null | null | null | Aplikasi/cihanjuang/assets/css/print-css/960.css | k-sandi/pkm-poltekpos | d768057a394cf11f1f424efd144a87f243aa2e97 | [
"Apache-2.0"
] | null | null | null | Aplikasi/cihanjuang/assets/css/print-css/960.css | k-sandi/pkm-poltekpos | d768057a394cf11f1f424efd144a87f243aa2e97 | [
"Apache-2.0"
] | 2 | 2021-07-22T16:18:19.000Z | 2021-07-22T17:18:56.000Z | /* Containers
----------------------------------------------------------------------------------------------------*/
.container_12 {
margin-left: auto;
margin-right: auto;
width: 560px;
border:1px solid #ddd;
border-radius:4px;
}
/* Grid >> Children (Alpha ~ First, Omega ~ Last)
----------------------------------------------------------------------------------------------------*/
.alpha {
margin-left: 0 !important;
}
.omega {
margin-right: 0 !important;
}
/* Grid >> Global
----------------------------------------------------------------------------------------------------*/
.grid_7,
.grid_12{
display:inline;
float: left;
position: relative;
margin-left: 10.0px;
margin-right: 10.0px;
}
/* Grid >> 2 Columns
----------------------------------------------------------------------------------------------------*/
.container_12 .grid_7{
width:540px;
}
/* Prefix Extra Space >> 2 Columns
----------------------------------------------------------------------------------------------------*/
.container_12 .prefix_7 {
padding-left:560px;
}
/* Suffix Extra Space >> 2 Columns
----------------------------------------------------------------------------------------------------*/
.container_12 .suffix_7 {
padding-right:560px;
}
/* Push Space >> 2 Columns
----------------------------------------------------------------------------------------------------*/
.container_12 .push_7 {
left:560px;
}
/* Pull Space >> 2 Columns
----------------------------------------------------------------------------------------------------*/
.container_12 .pull_7 {
right:560px;
}
/* Clear Floated Elements
----------------------------------------------------------------------------------------------------*/
.clear {
clear: both;
display: block;
overflow: hidden;
visibility: hidden;
width: 0;
height: 0;
}
.clearfix:after {
clear: both;
content: ' ';
display: block;
font-size: 0;
line-height: 0;
visibility: hidden;
width: 0;
height: 0;
}
.clearfix {
display: inline-block;
}
* html .clearfix {
height: 1%;
}
.clearfix {
display: block;
}
| 21.387755 | 102 | 0.363073 |
218e926e193d3bd2e7ec9734eeb57da957037d3e | 309 | js | JavaScript | backend/src/server/actions/save-event.js | marcmenn/ibeacon | bae3749c7f5e4d7320759dc99fcad25d9c53ab31 | [
"MIT"
] | 4 | 2020-03-23T11:06:50.000Z | 2020-04-12T11:03:45.000Z | backend/src/server/actions/save-event.js | marcmenn/ibeacon | bae3749c7f5e4d7320759dc99fcad25d9c53ab31 | [
"MIT"
] | 3 | 2020-03-29T18:49:24.000Z | 2021-05-11T07:40:20.000Z | backend/src/server/actions/save-event.js | marcmenn/ibeacon | bae3749c7f5e4d7320759dc99fcad25d9c53ab31 | [
"MIT"
] | null | null | null | import event from './event.js'
import { json, jsonOnly } from './json.js'
import withBeaconIdFromDatabase from './with-beacon-id-from-database.js'
import withDeviceId from './with-device-id.js'
export default (type) => [
withDeviceId,
json,
jsonOnly,
withBeaconIdFromDatabase(true),
event(type),
]
| 23.769231 | 72 | 0.724919 |
99e532df9beb7587e655ef6e1140fc111900aeb4 | 2,786 | rs | Rust | src/lib.rs | mith-mmk/rust-rect | 48544c09c6320e4413d740f31fc3df2ef7452425 | [
"Apache-2.0",
"MIT"
] | null | null | null | src/lib.rs | mith-mmk/rust-rect | 48544c09c6320e4413d740f31fc3df2ef7452425 | [
"Apache-2.0",
"MIT"
] | null | null | null | src/lib.rs | mith-mmk/rust-rect | 48544c09c6320e4413d740f31fc3df2ef7452425 | [
"Apache-2.0",
"MIT"
] | null | null | null | mod utils;
use wasm_bindgen::prelude::*;
// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator.
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
#[wasm_bindgen]
extern {
fn alert(s: &str);
#[wasm_bindgen(js_namespace = Math)]
fn random() -> f64;
#[wasm_bindgen(js_namespace = console)]
fn log(s: &str);
}
fn rand_u32(range: u32) -> u32 {
    return (random() * (range as f64)) as u32;
}
#[wasm_bindgen]
pub struct Universe {
width: u32,
height: u32,
buffer: Vec<u8>,
}
#[wasm_bindgen]
impl Universe {
pub fn new (width: u32, height: u32) -> Universe {
let buffersize = width * height * 4;
let buffer = (0..buffersize)
.map(|_| {0})
.collect();
Universe {
width,
height,
buffer,
}
}
pub fn width(&self) -> u32 {
self.width
}
pub fn height(&self) -> u32 {
self.height
}
pub fn buffer(&self) -> *const u8 {
self.buffer.as_ptr()
}
    pub fn fillbox(&mut self, color: u32) {
let height = self.height;
let width = self.width;
let buf = &mut self.buffer;
        // Unpack 0xRRGGBB into separate channel bytes; pixels are written below in RGBA order
        let red: u8 = ((color >> 16) & 0xff) as u8;
        let green: u8 = ((color >> 8) & 0xff) as u8;
        let blue: u8 = ((color >> 0) & 0xff) as u8;
let alpha: u8 = 0xff;
log(&format!("{} {} {}",blue,green,red));
for y in 0..height {
let offset = y * width * 4;
for x in 0..width {
let pos :usize = (offset + x * 4) as usize;
buf[pos] = red;
buf[pos + 1] = green;
buf[pos + 2] = blue;
buf[pos + 3] = alpha;
}
}
}
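    // Fills a randomly positioned and sized rectangle with a random RGBA color.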
    pub fn fillrandomrect(&mut self) {
let height = self.height;
let width = self.width;
let buf = &mut self.buffer;
let startx:u32 = rand_u32(width);
let starty:u32 = rand_u32(height);
let endx:u32 = rand_u32(width-startx);
let endy:u32 = rand_u32(height-starty);
let red:u8 = rand_u32(255) as u8;
let green:u8 = rand_u32(255) as u8;
let blue:u8 = rand_u32(255) as u8;
let alpha:u8 = rand_u32(255) as u8;
for y in starty..endy {
let offset = y * width * 4;
for x in startx..endx {
let pos :usize= (offset + (x * 4)) as usize;
buf[pos] = red;
buf[pos + 1] = green;
buf[pos + 2] = blue;
buf[pos + 3] = alpha;
}
}
}
}
| 25.559633 | 73 | 0.488155 |
5c15d83c645caf6a9a18e8a2492fcd953f4f973e | 10,087 | rs | Rust | rumqttd/src/lib.rs | Apolexian/rumqtt | d9be4d14e71e8d5e7a783c9b5f8eeb6c8dfef684 | [
"Apache-2.0"
] | null | null | null | rumqttd/src/lib.rs | Apolexian/rumqtt | d9be4d14e71e8d5e7a783c9b5f8eeb6c8dfef684 | [
"Apache-2.0"
] | null | null | null | rumqttd/src/lib.rs | Apolexian/rumqtt | d9be4d14e71e8d5e7a783c9b5f8eeb6c8dfef684 | [
"Apache-2.0"
] | null | null | null | #[macro_use]
extern crate log;
use serde::{Deserialize, Serialize};
use std::time::Duration;
use std::{io, thread};
use std::{net::SocketAddr, sync::Arc};
use mqttbytes::v4::Packet;
use rumqttlog::*;
use tokio::time::error::Elapsed;
use crate::remotelink::RemoteLink;
use tokio::io::{AsyncRead, AsyncWrite};
use tokio::{task, time};
use quic_socket::{QuicClient, QuicServer, QuicSocket};
pub mod async_locallink;
mod consolelink;
mod locallink;
mod network;
mod remotelink;
mod state;
use crate::consolelink::ConsoleLink;
pub use crate::locallink::{LinkError, LinkRx, LinkTx};
use crate::network::Network;
use std::collections::HashMap;
#[derive(Debug, thiserror::Error)]
#[error("Acceptor error")]
pub enum Error {
#[error("I/O {0}")]
Io(#[from] io::Error),
#[error("Connection error {0}")]
Connection(#[from] remotelink::Error),
#[error("Timeout")]
Timeout(#[from] Elapsed),
#[error("Channel recv error")]
Recv(#[from] RecvError),
#[error("Channel send error")]
Send(#[from] SendError<(Id, Event)>),
#[error("Server cert not provided")]
ServerCertRequired,
#[error("Server private key not provided")]
ServerKeyRequired,
#[error("CA file {0} no found")]
CaFileNotFound(String),
#[error("Server cert file {0} not found")]
ServerCertNotFound(String),
#[error("Server private key file {0} not found")]
ServerKeyNotFound(String),
#[error("Invalid CA cert file {0}")]
InvalidCACert(String),
#[error("Invalid server cert file {0}")]
InvalidServerCert(String),
#[error("Invalid server pass")]
InvalidServerPass(),
#[error("Invalid server key file {0}")]
InvalidServerKey(String),
RustlsNotEnabled,
NativeTlsNotEnabled,
Disconnected,
NetworkClosed,
WrongPacket(Packet),
}
type Id = usize;
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct Config {
pub id: usize,
pub router: rumqttlog::Config,
pub servers: HashMap<String, ServerSettings>,
pub cluster: Option<HashMap<String, MeshSettings>>,
pub replicator: Option<ConnectionSettings>,
pub console: ConsoleSettings,
}
#[allow(dead_code)]
enum ServerTLSAcceptor {
#[cfg(feature = "use-rustls")]
RustlsAcceptor { acceptor: tokio_rustls::TlsAcceptor },
#[cfg(feature = "use-native-tls")]
NativeTLSAcceptor {
acceptor: tokio_native_tls::TlsAcceptor,
},
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum ServerCert {
RustlsCert {
ca_path: String,
cert_path: String,
key_path: String,
},
NativeTlsCert {
pkcs12_path: String,
pkcs12_pass: String,
},
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ServerSettings {
pub listen: SocketAddr,
pub cert: Option<ServerCert>,
pub next_connection_delay_ms: u64,
pub connections: ConnectionSettings,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ConnectionLoginCredentials {
pub username: String,
pub password: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ConnectionSettings {
pub connection_timeout_ms: u16,
pub max_client_id_len: usize,
pub throttle_delay_ms: u64,
pub max_payload_size: usize,
pub max_inflight_count: u16,
pub max_inflight_size: usize,
pub login_credentials: Option<Vec<ConnectionLoginCredentials>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MeshSettings {
pub address: SocketAddr,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConsoleSettings {
pub listen: SocketAddr,
}
impl Default for ServerSettings {
fn default() -> Self {
panic!("Server settings should be derived from a configuration file")
}
}
impl Default for ConnectionSettings {
fn default() -> Self {
panic!("Server settings should be derived from a configuration file")
}
}
impl Default for ConsoleSettings {
fn default() -> Self {
panic!("Console settings should be derived from configuration file")
}
}
pub struct Broker {
config: Arc<Config>,
router_tx: Sender<(Id, Event)>,
router: Option<Router>,
}
impl Broker {
pub fn new(config: Config) -> Broker {
let config = Arc::new(config);
let router_config = Arc::new(config.router.clone());
let (router, router_tx) = Router::new(router_config);
Broker {
config,
router_tx,
router: Some(router),
}
}
pub fn router_handle(&self) -> Sender<(Id, Event)> {
self.router_tx.clone()
}
pub fn link(&self, client_id: &str) -> Result<LinkTx, LinkError> {
        // Register this connection with the router. The router replies with an ack which,
        // if ok, will start the link. The router can sometimes reject the connection
        // (e.g. when the max connection limit is reached).
let tx = LinkTx::new(client_id, self.router_tx.clone());
Ok(tx)
}
pub fn start(&mut self) -> Result<(), Error> {
// spawn the router in a separate thread
let mut router = self.router.take().unwrap();
let router_thread = thread::Builder::new().name("rumqttd-router".to_owned());
router_thread.spawn(move || router.start())?;
// spawn servers in a separate thread
for (id, config) in self.config.servers.clone() {
let server_name = format!("rumqttd-server-{}", id);
let server_thread = thread::Builder::new().name(server_name);
let server = Server::new(id, config, self.router_tx.clone());
server_thread.spawn(move || {
let mut runtime = tokio::runtime::Builder::new_current_thread();
let runtime = runtime.enable_all().build().unwrap();
runtime.block_on(async { server.start().await });
})?;
}
let mut runtime = tokio::runtime::Builder::new_current_thread();
let runtime = runtime.enable_all().build().unwrap();
// Run console in current thread, if it is configured.
let console = ConsoleLink::new(self.config.clone(), self.router_tx.clone());
let console = Arc::new(console);
runtime.block_on(async {
consolelink::start(console).await;
});
Ok(())
}
}
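/// Accepts incoming QUIC connections for a single configured listener and hands
/// each connection off to a `Connector`.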
struct Server {
id: String,
config: ServerSettings,
router_tx: Sender<(Id, Event)>,
}
impl Server {
pub fn new(id: String, config: ServerSettings, router_tx: Sender<(Id, Event)>) -> Server {
Server {
id,
config,
router_tx,
}
}
async fn start(&self) {
let addr = "127.0.0.1:4442".parse().unwrap();
let server = QuicServer::new(
Some(addr),
Some("https://localhost:4442".to_string()),
Some("localhost".to_string()),
)
.await;
let delay = Duration::from_millis(self.config.next_connection_delay_ms);
let config = Arc::new(self.config.connections.clone());
let max_incoming_size = config.max_payload_size;
info!(
"Waiting for connections on {}. Server = {}",
self.config.listen, self.id
);
let network = { Network::new(server, max_incoming_size) };
let config = config.clone();
let router_tx = self.router_tx.clone();
// Spawn a new thread to handle this connection.
task::spawn(async {
let connector = Connector::new(config, router_tx);
if let Err(e) = connector.new_connection(network).await {
error!("Dropping link task!! Result = {:?}", e);
}
});
time::sleep(delay).await;
}
}
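/// Handles a single accepted connection: waits for the MQTT connect handshake,
/// registers the link with the router and reports the eventual disconnection.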
struct Connector {
config: Arc<ConnectionSettings>,
router_tx: Sender<(Id, Event)>,
}
impl Connector {
fn new(config: Arc<ConnectionSettings>, router_tx: Sender<(Id, Event)>) -> Connector {
Connector { config, router_tx }
}
    /// A new network connection should wait for the MQTT CONNECT packet. This is handled
    /// asynchronously so that the listener isn't blocked from accepting new connections while
    /// this connection waits for its CONNECT packet. It also honours the configured connection
    /// wait time to prevent denial-of-service attacks (rogue clients which only open a network
    /// connection, without ever sending a CONNECT packet, to push the server to its concurrent
    /// connection limit).
async fn new_connection(&self, network: Network) -> Result<(), Error> {
let config = self.config.clone();
let router_tx = self.router_tx.clone();
// Start the link
let (client_id, id, mut link) = RemoteLink::new(config, router_tx, network).await?;
let (execute_will, pending) = match link.start().await {
            // Connection got closed. This shouldn't usually happen
Ok(_) => {
error!("Stopped!! Id = {} ({})", client_id, id);
(true, link.state.clean())
}
// We are representing clean close as Abort in `Network`
Err(remotelink::Error::Io(e)) if e.kind() == io::ErrorKind::ConnectionAborted => {
info!("Closed!! Id = {} ({})", client_id, id);
(true, link.state.clean())
}
// Client requested disconnection.
Err(remotelink::Error::Disconnect) => {
info!("Disconnected!! Id = {} ({})", client_id, id);
(false, link.state.clean())
}
// Any other error
Err(e) => {
error!("Error!! Id = {} ({}), {}", client_id, id, e.to_string());
(true, link.state.clean())
}
};
let disconnect = Disconnection::new(client_id, execute_will, pending);
let disconnect = Event::Disconnect(disconnect);
let message = (id, disconnect);
self.router_tx.send(message)?;
Ok(())
}
}
pub trait IO: AsyncRead + AsyncWrite + Send + Sync + Unpin {}
impl<T: AsyncRead + AsyncWrite + Send + Sync + Unpin> IO for T {}
| 31.423676 | 104 | 0.618816 |
ea54abf0b94d11c53d2a3b74d2fc1098d6f0b558 | 2,052 | swift | Swift | NavigationBarTransition/Translucent/TranslucentViewController.swift | snownothing/NavigationBarTransition | 9045b5c519163a4d367eafc28bc0b466c654cf23 | [
"MIT"
] | null | null | null | NavigationBarTransition/Translucent/TranslucentViewController.swift | snownothing/NavigationBarTransition | 9045b5c519163a4d367eafc28bc0b466c654cf23 | [
"MIT"
] | null | null | null | NavigationBarTransition/Translucent/TranslucentViewController.swift | snownothing/NavigationBarTransition | 9045b5c519163a4d367eafc28bc0b466c654cf23 | [
"MIT"
] | null | null | null | //
// TranslucentViewController.swift
// NavigationBarTransition
//
// Created by Moch Xiao on 3/27/16.
// Copyright © 2016 Moch. All rights reserved.
//
import UIKit
class TranslucentViewController: UIViewController {
@IBOutlet weak var customNavigationBar: UINavigationBar!
@IBOutlet weak var textView: UITextView!
private var ratio: CGFloat = 0
override func viewDidLoad() {
super.viewDidLoad()
customNavigationBar.translucent = true
customNavigationBar.setBackgroundImage(UIImage(), forBarMetrics: .Default)
customNavigationBar.shadowImage = UIImage()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
navigationController?.setNavigationBarHidden(true, animated: true)
textView.addObserver(self, forKeyPath: "contentOffset", options: .New, context: nil)
}
override func viewWillDisappear(animated: Bool) {
super.viewWillDisappear(animated)
navigationController?.setNavigationBarHidden(false, animated: true)
textView.removeObserver(self, forKeyPath: "contentOffset")
}
}
extension TranslucentViewController {
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
if keyPath != "contentOffset" { return }
let offSetY: CGFloat = textView.contentOffset.y
ratio = offSetY / 500
updateAppearance()
}
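    // Rebuild the custom bar background with an opacity proportional to the scroll ratio.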
private func updateAppearance() {
let image = UIImageFromColor(UIColor.yellowColor().colorWithAlphaComponent(ratio))
customNavigationBar?.setBackgroundImage(image, forBarMetrics: .Default)
customNavigationBar?.shadowImage = image
customNavigationBar.translucent = ratio <= 1
}
}
extension TranslucentViewController {
@IBAction func pop(sender: UIBarButtonItem) {
navigationController?.popViewControllerAnimated(true)
}
} | 30.626866 | 157 | 0.691033 |
dae32a68d1054438c2c880ab4935911ec9323be4 | 12,469 | dart | Dart | lib/ui/bordered_container.dart | ashishbeck/slide_puzzle | e07be4cd82df851b5ce2099322495426a98cb6f5 | [
"MIT"
] | 1 | 2022-03-28T16:25:09.000Z | 2022-03-28T16:25:09.000Z | lib/ui/bordered_container.dart | ashishbeck/slide_puzzle | e07be4cd82df851b5ce2099322495426a98cb6f5 | [
"MIT"
] | null | null | null | lib/ui/bordered_container.dart | ashishbeck/slide_puzzle | e07be4cd82df851b5ce2099322495426a98cb6f5 | [
"MIT"
] | null | null | null | import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/scheduler.dart';
import 'package:provider/provider.dart';
import 'package:slide_puzzle/code/audio.dart';
import 'package:slide_puzzle/code/constants.dart';
import 'package:slide_puzzle/code/providers.dart';
class BorderedContainer extends StatefulWidget {
final String label;
final Widget child;
final double spacing;
final Color? color;
final bool isBottom;
final bool isRight;
final bool shouldAnimateEntry;
// final AnimationController? animationController;
final Function(AnimationController controller)? buttonController;
const BorderedContainer({
Key? key,
// this.animationController,
required this.label,
required this.child,
this.spacing = 10,
this.color,
this.isBottom = true,
this.isRight = true,
this.shouldAnimateEntry = true,
this.buttonController,
}) : super(key: key);
@override
State<BorderedContainer> createState() => _BorderedContainerState();
}
class _BorderedContainerState extends State<BorderedContainer>
with TickerProviderStateMixin {
late AnimationController controller;
late AnimationController buttonController;
bool isAnimatingBase = true;
Duration buttonDuration = Duration(milliseconds: 100);
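  // Plays the one-time entry animation (slide-in plus button press) and records in
  // ConfigProvider that this element has already been shown.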
_animateEntry() async {
ConfigProvider configProvider = context.read<ConfigProvider>();
if (configProvider.entryAnimationDone[widget.label] != null &&
configProvider.entryAnimationDone[widget.label]!) {
isAnimatingBase = false;
return;
}
buttonController.value = 1;
if (widget.label == "3x3" || widget.label == "4x4") {
configProvider.seenEntryAnimation("3x3");
configProvider.seenEntryAnimation("4x4");
}
if (widget.label == "Shuffle" || widget.label == "Reset") {
configProvider.seenEntryAnimation("Shuffle");
configProvider.seenEntryAnimation("Reset");
}
configProvider.seenEntryAnimation(widget.label);
AudioService.instance.elementEntry();
controller.forward().then((value) => setState(() {
isAnimatingBase = false;
buttonController.duration = Duration(milliseconds: 500);
buttonController
.reverse()
.then((value) => buttonController.duration = buttonDuration);
}));
}
@override
void initState() {
super.initState();
buttonController =
AnimationController(vsync: this, duration: buttonDuration);
if (widget.buttonController != null) {
widget.buttonController!(buttonController);
}
controller = AnimationController(
vsync: this, duration: Duration(milliseconds: defaultEntryTime));
if (widget.shouldAnimateEntry) {
_animateEntry();
} else {
// setState(() {
isAnimatingBase = false;
// });
}
}
@override
void dispose() {
controller.dispose();
buttonController.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
return LayoutBuilder(builder: (context, constraints) {
return Container(
// height: height + 50,
// width: width + 50,
child: isAnimatingBase
? ClipPath(
clipper: CustomClipperShape(
isBottom: widget.isBottom,
isRight: widget.isRight,
spacing: widget.spacing,
),
child: SlideTransition(
position: Tween<Offset>(
begin: const Offset(0, -1),
end: const Offset(0, 0),
).animate(
CurvedAnimation(
parent: controller, curve: Curves.easeInOutSine),
),
child: Stack(
clipBehavior: Clip.none,
children: [
Positioned(
bottom: widget.isBottom ? -widget.spacing : null,
top: widget.isBottom ? null : -widget.spacing,
right: widget.isRight ? -widget.spacing : null,
left: widget.isRight ? null : -widget.spacing,
child: Container(
height: constraints.maxHeight,
width: constraints.maxWidth,
color: widget.buttonController != null
? primaryColor
: secondaryColor),
),
],
),
),
)
: Stack(
clipBehavior: Clip.none,
alignment: Alignment.bottomRight,
children: [
Positioned(
bottom: widget.isBottom ? -widget.spacing : null,
top: widget.isBottom ? null : -widget.spacing,
right: widget.isRight ? -widget.spacing : null,
left: widget.isRight ? null : -widget.spacing,
child: CustomPaint(
painter: MyFrameShape(
spacing: widget.spacing,
color: widget.color ?? primaryColor,
isBottom: widget.isBottom,
isRight: widget.isRight,
animationController: buttonController,
),
child: Container(
height: constraints.maxHeight,
width: constraints.maxWidth,
),
),
),
(widget.buttonController != null || widget.shouldAnimateEntry)
? SlideTransition(
position: Tween<Offset>(
begin: const Offset(0, 0),
end: Offset(
(widget.isRight
? widget.spacing
: -widget.spacing) /
constraints.maxWidth,
(widget.isBottom
? widget.spacing
: -widget.spacing) /
constraints.maxHeight))
.animate(buttonController),
child: widget.child)
: widget.child,
],
),
);
});
}
}
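/// Paints the solid frame offset towards the chosen corner behind the child,
/// optionally animated by [animationController].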
class MyFrameShape extends CustomPainter {
final double spacing;
final Color? color;
final bool isBottom;
final bool isRight;
final AnimationController? animationController;
MyFrameShape({
this.spacing = 10,
this.color,
this.isBottom = true,
this.isRight = true,
this.animationController,
});
@override
void paint(Canvas canvas, Size size) {
double height = size.height;
double width = size.width;
Paint paint_0 = Paint()
..color = color ?? primaryColor
..style = PaintingStyle.fill
..strokeWidth = 0.5;
double animationOffset =
animationController != null ? (1 - animationController!.value) : 1;
Path bottomRight = Path()
..moveTo(-spacing * animationOffset, -spacing * animationOffset)
..lineTo(width - spacing * animationOffset, -spacing * animationOffset)
..lineTo(width, 0) // (width, spacing)
..lineTo(width, height)
..lineTo(0, height) // (spacing, height)
..lineTo(-spacing * animationOffset, height - spacing * animationOffset)
..lineTo(-spacing * animationOffset, -spacing * animationOffset)
..close();
if (isBottom && isRight) canvas.drawPath(bottomRight, paint_0);
// Paint bottomLeft = Paint()
// ..color = color
// ..style = PaintingStyle.fill
// ..strokeWidth = 0.5;
Path bottomLeft = Path()
..moveTo(0, 0)
..lineTo(spacing * animationOffset, -spacing * animationOffset)
..lineTo(width + spacing * animationOffset, -spacing * animationOffset)
..lineTo(
width + spacing * animationOffset, height - spacing * animationOffset)
..lineTo(width, height)
..lineTo(0, height)
..lineTo(0, 0);
if (isBottom && !isRight) canvas.drawPath(bottomLeft, paint_0);
Path topRight = Path()
..moveTo(-spacing * animationOffset, spacing * animationOffset)
..lineTo(0, 0)
..lineTo(width, 0)
..lineTo(width, height)
..lineTo(
width - spacing * animationOffset, height + spacing * animationOffset)
..lineTo(-spacing * animationOffset, height + spacing * animationOffset)
..lineTo(-spacing * animationOffset, spacing * animationOffset);
if (!isBottom && isRight) canvas.drawPath(topRight, paint_0);
}
@override
bool shouldRepaint(covariant CustomPainter oldDelegate) {
// TODO: implement shouldRepaint
return true;
}
}
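/// Clips the entry-animation layer to the frame's offset footprint
/// (bottom-right, bottom-left or top-right).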
class CustomClipperShape extends CustomClipper<Path> {
final double spacing;
final bool isBottom;
final bool isRight;
CustomClipperShape({
required this.spacing,
required this.isBottom,
required this.isRight,
});
@override
Path getClip(Size size) {
double height = size.height;
double width = size.width;
var bottomRight = Path()
..moveTo(spacing, spacing)
..lineTo(width + spacing, spacing)
..lineTo(width + spacing, height + spacing)
..lineTo(0 + spacing, height + spacing)
..lineTo(0 + spacing, 0 + spacing)
..close();
var bottomLeft = Path()
..moveTo(-spacing, spacing)
..lineTo(width - spacing, spacing)
..lineTo(width - spacing, height + spacing)
..lineTo(0 - spacing, height + spacing)
..lineTo(0 - spacing, 0 + spacing)
..close();
var topRight = Path()
..moveTo(spacing, -spacing)
..lineTo(width + spacing, -spacing)
..lineTo(width + spacing, height - spacing)
..lineTo(0 + spacing, height - spacing)
..lineTo(0 + spacing, 0 - spacing)
..close();
if (isBottom && isRight) {
return bottomRight;
} else if (isBottom && !isRight) {
return bottomLeft;
} else if (!isBottom && isRight) {
return topRight;
}
return bottomRight;
}
@override
bool shouldReclip(covariant CustomClipper<Path> oldClipper) {
return true;
}
}
class ArrowClipperShape extends CustomClipper<Path> {
final double spacing;
final bool isBottom;
final bool isRight;
ArrowClipperShape({
required this.spacing,
required this.isBottom,
required this.isRight,
});
@override
Path getClip(Size size) {
double height = size.height;
double width = size.width;
var bottomRight = Path()
..moveTo(spacing, spacing)
..lineTo(width + spacing, spacing)
..lineTo(width + spacing, height + spacing)
..lineTo(0 + spacing, height + spacing)
..lineTo(0 + spacing, 0 + spacing)
..close();
var bottomLeft = Path()
..moveTo(-spacing, -spacing)
..lineTo(width, -spacing)
..lineTo(width, height + spacing)
..lineTo(0 - spacing, height + spacing)
..lineTo(0 - spacing, 0 - spacing)
..close();
// var bottomLeft = Path()
// ..moveTo(-spacing, spacing)
// ..lineTo(width - spacing, spacing)
// ..lineTo(width - spacing, height + spacing)
// ..lineTo(0 - spacing, height + spacing)
// ..lineTo(0 - spacing, 0 + spacing)
// ..close();
// var topRight = Path()
// ..moveTo(width - spacing, -spacing)
// ..lineTo(width + spacing, -spacing)
// ..lineTo(width + spacing, height + spacing)
// ..lineTo(0 + spacing, height + spacing)
// ..lineTo(0 + spacing, 0 - spacing)
// ..close();
var topRight = Path()
..moveTo(0, 0)
..lineTo(spacing, -spacing)
..lineTo(width + spacing, -spacing)
..lineTo(width + spacing, height - spacing)
..lineTo(width, height)
..lineTo(0, height)
..lineTo(0, 0)
..close();
if (isBottom && isRight) {
return bottomRight;
} else if (isBottom && !isRight) {
return bottomLeft;
} else if (!isBottom && isRight) {
return topRight;
}
return bottomRight;
}
@override
bool shouldReclip(covariant CustomClipper<Path> oldClipper) {
return true;
}
}
| 32.471354 | 80 | 0.565723 |
257712ee7622da60dafbb7afe27b67fe9866f329 | 8,002 | cs | C# | vendor/nuget/src/VsConsole/PowerShellHost/HostUtilities.cs | SubasishMohapatra/SquirrelDemo | 470feea41f64d99b5c7a967ba1e75035ddc77453 | [
"MIT"
] | null | null | null | vendor/nuget/src/VsConsole/PowerShellHost/HostUtilities.cs | SubasishMohapatra/SquirrelDemo | 470feea41f64d99b5c7a967ba1e75035ddc77453 | [
"MIT"
] | null | null | null | vendor/nuget/src/VsConsole/PowerShellHost/HostUtilities.cs | SubasishMohapatra/SquirrelDemo | 470feea41f64d99b5c7a967ba1e75035ddc77453 | [
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using System.IO;
using System.Management.Automation;
using System.Reflection;
using Microsoft.Win32;
namespace NuGetConsole.Host.PowerShell
{
internal enum SuggestionMatchType
{
Command = 0,
Error = 1,
Dynamic = 2
}
/// <summary>
/// Implements utility methods that might be used by Hosts.
/// </summary>
internal static class HostUtilities
{
#region GetProfileCommands
/// <summary>
/// Gets a PSObject whose base object is currentUserCurrentHost and with notes for the other 4 parameters.
/// </summary>
/// <param name="allUsersAllHosts">The profile file name for all users and all hosts.</param>
/// <param name="allUsersCurrentHost">The profile file name for all users and current host.</param>
/// <param name="currentUserAllHosts">The profile file name for current user and all hosts.</param>
/// <param name="currentUserCurrentHost">The profile name for current user and current host.</param>
/// <returns>A PSObject whose base object is currentUserCurrentHost and with notes for the other 4 parameters.</returns>
internal static PSObject GetDollarProfile(string allUsersAllHosts, string allUsersCurrentHost, string currentUserAllHosts, string currentUserCurrentHost)
{
PSObject returnValue = new PSObject(currentUserCurrentHost);
returnValue.Properties.Add(new PSNoteProperty("AllUsersAllHosts", allUsersAllHosts));
returnValue.Properties.Add(new PSNoteProperty("AllUsersCurrentHost", allUsersCurrentHost));
returnValue.Properties.Add(new PSNoteProperty("CurrentUserAllHosts", currentUserAllHosts));
returnValue.Properties.Add(new PSNoteProperty("CurrentUserCurrentHost", currentUserCurrentHost));
return returnValue;
}
/// <summary>
/// Gets an array of commands that can be run sequentially to set $profile and run the profile commands.
/// </summary>
/// <param name="shellId">The id identifying the host or shell used in profile file names.</param>
/// <returns></returns>
public static PSCommand[] GetProfileCommands(string shellId)
{
return HostUtilities.GetProfileCommands(shellId, false);
}
/// <summary>
/// Gets an array of commands that can be run sequentially to set $profile and run the profile commands.
/// </summary>
/// <param name="shellId">The id identifying the host or shell used in profile file names.</param>
/// <param name="useTestProfile">used from test not to overwrite the profile file names from development boxes</param>
/// <returns></returns>
internal static PSCommand[] GetProfileCommands(string shellId, bool useTestProfile)
{
List<PSCommand> commands = new List<PSCommand>();
string allUsersAllHosts = HostUtilities.GetFullProfileFileName(null, false, useTestProfile);
string allUsersCurrentHost = HostUtilities.GetFullProfileFileName(shellId, false, useTestProfile);
string currentUserAllHosts = HostUtilities.GetFullProfileFileName(null, true, useTestProfile);
string currentUserCurrentHost = HostUtilities.GetFullProfileFileName(shellId, true, useTestProfile);
PSObject dollarProfile = HostUtilities.GetDollarProfile(allUsersAllHosts, allUsersCurrentHost, currentUserAllHosts, currentUserCurrentHost);
PSCommand command = new PSCommand();
command.AddCommand("set-variable");
command.AddParameter("Name", "profile");
command.AddParameter("Value", dollarProfile);
command.AddParameter("Option", ScopedItemOptions.None);
commands.Add(command);
string[] profilePaths = new string[] { allUsersAllHosts, allUsersCurrentHost, currentUserAllHosts, currentUserCurrentHost };
foreach (string profilePath in profilePaths)
{
if (!System.IO.File.Exists(profilePath))
{
continue;
}
command = new PSCommand();
command.AddCommand(profilePath, false);
commands.Add(command);
}
return commands.ToArray();
}
/// <summary>
/// Used to get all profile file names for the current or all hosts and for the current or all users.
/// </summary>
/// <param name="shellId">null for all hosts, not null for the specified host</param>
/// <param name="forCurrentUser">false for all users, true for the current user.</param>
/// <param name="useTestProfile">used from test not to overwrite the profile file names from development boxes</param>
/// <returns>The profile file name matching the parameters.</returns>
internal static string GetFullProfileFileName(string shellId, bool forCurrentUser, bool useTestProfile)
{
string basePath;
if (forCurrentUser)
{
basePath = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
basePath = System.IO.Path.Combine(basePath, "WindowsPowerShell");
}
else
{
basePath = GetAllUsersFolderPath();
if (string.IsNullOrEmpty(basePath))
{
return "";
}
}
string profileName = useTestProfile ? "profile_test.ps1" : "profile.ps1";
if (!string.IsNullOrEmpty(shellId))
{
profileName = shellId + "_" + profileName;
}
return Path.Combine(basePath, profileName);
}
/// <summary>
/// Used internally in GetFullProfileFileName to get the base path for all users profiles.
/// </summary>
///
/// <returns>the base path for all users profiles.</returns>
private static string GetAllUsersFolderPath()
{
string folderPath = string.Empty;
try
{
folderPath = GetApplicationBase();
}
catch (System.Security.SecurityException)
{
}
return folderPath;
}
internal const string MonadRootKeyPath = "Software\\Microsoft\\PowerShell";
internal const string MonadEngineKey = "PowerShellEngine";
internal const string MonadEngine_ApplicationBase = "ApplicationBase";
internal const string RegistryVersionKey = "1";
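        /// <summary>
        /// Returns the PowerShell engine ApplicationBase from the registry, falling back to the
        /// entry assembly or the System.Management.Automation assembly location.
        /// </summary>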
internal static string GetApplicationBase()
{
string engineKeyPath = MonadRootKeyPath + "\\" + RegistryVersionKey + "\\" + MonadEngineKey;
using (RegistryKey engineKey = Registry.LocalMachine.OpenSubKey(engineKeyPath))
{
if (engineKey != null)
return engineKey.GetValue(MonadEngine_ApplicationBase) as string;
}
// The default keys aren't installed, so try and use the entry assembly to
// get the application base. This works for managed apps like minishells...
Assembly assem = Assembly.GetEntryAssembly();
if (assem != null)
{
// For minishells, we just return the executable path.
return Path.GetDirectoryName(assem.Location);
}
            // For unmanaged host apps, look for the SMA dll; if it's not GAC'ed then
            // use its location as the application base...
assem = Assembly.GetAssembly(typeof(PSObject));
if (assem != null)
{
// For other hosts.
return Path.GetDirectoryName(assem.Location);
}
// otherwise, just give up...
return "";
}
#endregion GetProfileCommands
}
}
| 43.021505 | 161 | 0.621595 |
14bc716203b428eaa5bae937e64391a16dfa6668 | 4,419 | ts | TypeScript | src/MesaView.ts | 9sako6/MESA | 5a4f170a132d20410cf554890f5d26d107ca0664 | [
"MIT"
] | 3 | 2019-02-12T10:03:07.000Z | 2020-09-05T07:07:19.000Z | src/MesaView.ts | 9sako6/MESA | 5a4f170a132d20410cf554890f5d26d107ca0664 | [
"MIT"
] | 9 | 2019-02-11T20:11:31.000Z | 2020-01-03T20:35:40.000Z | src/MesaView.ts | 9sako6/MESA | 5a4f170a132d20410cf554890f5d26d107ca0664 | [
"MIT"
] | null | null | null | import $ from "jquery";
import MesaModel, { Attribute, Tag } from "./MesaModel";
export default class MesaView {
writeTextArea(text: string, model: MesaModel): void {
model.editor.session.setValue(text);
}
initUploadButton(): void {
const button: string = `
<form>
<label class="func-btn" for="upload-button">
Open
<input type="file" id="upload-button" style="display:none;">
</label>
<span class="file-info" id="file-name"></span>
</form>`;
$("#upload-button").replaceWith(button);
}
initSaveButton(): void {
const button: string = `
<table>
<tr>
<td>
<div class="func-btn"><a id="text-donwload" download="mesa_file.xml" href="#">Save</a></div>
</td>
<td>
<input type='text' id="download-filename" placeholder="Enter a file name">
<span class="file-info">.xml</span>
</td>
</tr>
</table>`;
$("#save-button").replaceWith(button);
}
initTagUploadButton(): void {
const button: string = `
<form>
<label class="func-btn" id="load-json" for="load-tags-button">
Load Tags
<input type="file" id="load-tags-button" style="display:none;">
</label>
<span class="file-info" id="tag-file-name"></span>
</form>`;
$("#tag-upload-button").replaceWith(button);
}
initTagSaveButton(): void {
const button: string = `
<div class="func-btn"><a id="json-donwload" download="mesa_tags.json" href="#">Save Tags</a></div>
<input type='text' id="download-jsonname" placeholder="Enter a file name">
<span class="file-info">.json</span>`;
$("#tag-save-button").replaceWith(button);
}
initTagSettingTable(): void {
const nameRow: string = `
<td class="table-header">Name</td>
<td><input type='text' id="tag-name-form" placeholder="Enter a tag name"></td>`;
const sepRow: string = `
<td class=" table-header">Separator</td>
<td><input type='text' id="tag-sep-form" placeholder="If you need ..."></td>`;
const isXmlRow: string = `
<td class="table-header">XML Tag?</td>
<td>
<input id="xml-flag" type="checkbox">
<label for="xml-flag"></label>
</td>`;
const attributeRow: string = `
<td><input type='text' class="attribute-name-form" placeholder="Enter a name"></td>
<td><input type='text' class="attribute-value-form" placeholder="Enter a value"></td>`;
const addAttributeButton: string = `<div class="func-btn" id="add-attribute">Add an attribute</div>`;
const table: string = `
<table class="tag-setting-table">
<tr>
${isXmlRow}
</tr>
<tr>
${nameRow}
</tr>
<tr id="tag-separator">
${sepRow}
</tr>
<tr id="attributes-header">
<td class="table-header">Attributes</td><td>${addAttributeButton}</td>
</tr>
</table>
<table class="tag-setting-table" id="attributes-input">
<tr>
${attributeRow}
</tr>
</table>`;
$("#tag-setting-table").html(table);
}
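  // Renders one clickable button per saved tag; XML tags embed their attribute list in an
  // "attributes" HTML attribute.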
makeTagButton(tagList: Tag[]): void {
let addElem: string = "";
for (let tag of tagList) {
if (tag.xmlFlag) {
// get attributes
let attributes: string = "";
if (tag.attributes !== undefined) {
tag.attributes.forEach(function(attr: Attribute) {
attributes += `${attr.name}__MESA_ATTRIBUTE_SEPARATOR__${attr.value},`; // __MESA_ATTRIBUTE_SEPARATOR__ and comma is neccessary
});
}
// make tag
addElem += `<div class="func-btn xml-tag-btn" val="${tag.name}" attributes="${attributes}">${tag.name}</div>`;
} else {
addElem += `<div class="func-btn tag-btn" val="${tag.sepChar +
tag.name}">${tag.name}</div>`;
}
}
// add buttons
$("#tags").append(addElem);
}
hideAddedMsg(): void {
$("#added-message").hide();
}
showAddedMsg(tagInfoDic: Tag): void {
$("#added-message").append("");
document.getElementById(
"added-message"
)!.innerText = `${tagInfoDic.name} was added.`;
$("#added-message").show();
$("#added-message").fadeOut(1500);
}
addAttributesInput(): void {
const attributeRow: string = `
<td><input type='text' id="attribute-name-form" placeholder="Enter a name"></td>
<td><input type='text' id="attribute-value-form" placeholder="Enter a value"></td>`;
$("#attributes-input").append(`<tr>${attributeRow}</tr>`);
}
}
| 30.6875 | 139 | 0.589274 |
da72bfd0f41c52ac19ecce10b5021e7f2609476b | 2,438 | php | PHP | resources/views/admin/changepassword.blade.php | mitulmlakhani/platter | dad2a0e98ff0c99f61c366e23c57727c2fc7ec49 | [
"MIT"
] | null | null | null | resources/views/admin/changepassword.blade.php | mitulmlakhani/platter | dad2a0e98ff0c99f61c366e23c57727c2fc7ec49 | [
"MIT"
] | null | null | null | resources/views/admin/changepassword.blade.php | mitulmlakhani/platter | dad2a0e98ff0c99f61c366e23c57727c2fc7ec49 | [
"MIT"
] | null | null | null | @extends('layouts.admin')
@section('page-title', 'Change Password')
@section('module', 'Change Password')
@section('content')
<div class="container">
<div class="row justify-content-center">
<div class="col-md-8">
@if (\Session::has('success'))
<div class="alert alert-success alert-dismissible fade show" role="alert">
<strong>Success ! {{ session('success') }}</strong> .
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
@endif
<div class="card card-primary card-outline">
<div class="card-header">
<h5 class="m-0">Change Password</h5>
</div>
<div class="card-body">
<form method="post" action="{{ route('admin.password.save') }}" enctype="multipart/form-data">
@csrf
<div class="form-group">
<label for="password">Current Password</label>
<input type="password" class="form-control {{ $errors->has('password') ? 'is-invalid' : '' }}" name="password" id="password" placeholder="Enter Current password">
<div class="invalid-feedback">{{ $errors->first('password') }}</div>
</div>
<div class="form-group">
<label for="new_password">New Password</label>
<input type="password" class="form-control {{ $errors->has('new_password') ? 'is-invalid' : '' }}" name="new_password" id="new_password" placeholder="Enter new password">
<div class="invalid-feedback">{{ $errors->first('new_password') }}</div>
</div>
<div class="form-group">
<label for="renew_password">Re Enter New Password</label>
<input type="password" class="form-control {{ $errors->has('renew_password') ? 'is-invalid' : '' }}" name="renew_password" id="renew_password" placeholder="Re enter new password">
<div class="invalid-feedback">{{ $errors->first('renew_password') }}</div>
</div>
<button type="submit" class="btn btn-primary">Submit</button>
</form>
</div>
</div>
</div>
</div>
</div>
@stop
| 47.803922 | 203 | 0.52338 |
148fa05e5b14693b5257d98c8e442f1c68fe7737 | 6,180 | ts | TypeScript | src/modules/cau-notice-watcher/index.ts | paywteam/eodiro-api-mysql | 5276aa32340da11a627bf61dc3b37db63c73ead2 | [
"MIT"
] | 10 | 2020-06-22T11:41:15.000Z | 2021-07-16T02:19:22.000Z | src/modules/cau-notice-watcher/index.ts | paywteam/eodiro-api-mysql | 5276aa32340da11a627bf61dc3b37db63c73ead2 | [
"MIT"
] | 19 | 2020-06-02T11:40:17.000Z | 2021-04-13T23:14:30.000Z | src/modules/cau-notice-watcher/index.ts | paywteam/eodiro-api2 | 5276aa32340da11a627bf61dc3b37db63c73ead2 | [
"MIT"
] | 3 | 2020-10-04T13:02:05.000Z | 2021-01-22T16:05:35.000Z | import prisma from '@/modules/prisma'
import appRoot from 'app-root-path'
import chalk from 'chalk'
import fs from 'fs'
import { JSDOM } from 'jsdom'
import { PendingXHR } from 'pending-xhr-puppeteer'
import puppeteer, { Browser, Page } from 'puppeteer'
import { telegramBot } from '../telegram-bot'
export type TitleBuilder = (
/** A single notice item */ noticeItemElement: HTMLElement | Element
) => string
export type UrlBuilder = (
/** A single notice item */ noticeItemElement: HTMLElement | Element
) => string
export type FeedOptions = {
/**
* Minutes
* @default 10
*/
interval?: number
}
export interface Publisher {
  /** Notice name which will be displayed to end users */
name: string
/** Unique key(id) for differentiating each subscriber */
key: string
url: string
  /** A CSS selector matching a single notice item */
noticeItemSelector: string
titleBuilder: TitleBuilder
urlBuilder?: UrlBuilder
}
export type PublisherBuilder = (siteInformation: {
name: string
key: string
url: string
}) => Publisher
export type LastNotice = Record<
string,
{
displayName: string
title: string
}
>
const eodiroTempDir = appRoot.resolve('/.eodiro')
const lastNoticeFilePath = appRoot.resolve('/.eodiro/last_notice.json')
export class CauNoticeWatcher {
private feedOptions: FeedOptions
private publishers: Publisher[] = []
private lastNotice: LastNotice
constructor(feedOptions?: FeedOptions) {
if (!feedOptions) {
feedOptions = {
interval: 10,
}
} else if (!feedOptions?.interval) {
feedOptions.interval = 10
}
this.feedOptions = feedOptions
this.lastNotice = CauNoticeWatcher.loadLastNoticeFile()
}
public register(publisher: Publisher): void {
for (const registeredSubscriber of this.publishers) {
if (registeredSubscriber.key === publisher.key) {
throw new Error(
`${chalk.blueBright(
'[Notice Watcher]'
)} Duplicate subscriber key detected: ${publisher.key}`
)
}
}
this.publishers.push(publisher)
if (!this.lastNotice[publisher.key]) {
this.lastNotice[publisher.key] = {
displayName: publisher.name,
title: '',
}
}
}
/**
* Get the `last_notice.json` file inside '.eodiro' directory
*/
public static loadLastNoticeFile(): LastNotice {
let lastNotice: LastNotice
if (!fs.existsSync(eodiroTempDir)) {
fs.mkdirSync(eodiroTempDir)
}
if (!fs.existsSync(lastNoticeFilePath)) {
lastNotice = {}
fs.writeFileSync(lastNoticeFilePath, JSON.stringify(lastNotice, null, 2))
} else {
lastNotice = JSON.parse(fs.readFileSync(lastNoticeFilePath, 'utf8'))
}
return lastNotice
}
private writeLastNoticeFile() {
fs.writeFileSync(
lastNoticeFilePath,
JSON.stringify(this.lastNotice, null, 2)
)
}
private getLastNoticeTitle(publisher: Publisher) {
return this.lastNotice[publisher.key].title
}
private updateLastNotice(publisher: Publisher, title: string) {
this.lastNotice[publisher.key] = {
displayName: publisher.name,
title,
}
}
public async run(): Promise<void> {
const browser = await puppeteer.launch()
const processResults = []
for (const subscriber of this.publishers) {
processResults.push(this.processPublisher(browser, subscriber))
}
await Promise.all(processResults)
// Dispose the browser
await browser.close()
}
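  // Scrapes a single publisher, compares against the stored last notice title and
  // sends Telegram notifications to subscribers for anything newer.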
private async processPublisher(browser: Browser, publisher: Publisher) {
const page = await browser.newPage()
page.setViewport({ width: 1280, height: 800 })
// page.setMaxListeners(Infinity)
const noticesSet = await CauNoticeWatcher.visit(page, publisher)
const notices = Array.from(noticesSet)
if (notices.length === 0) {
return
}
// Get subscriptions
const subscriptions = await prisma.noticeNotificationsSubscription.findMany(
{
where: {
noticeKey: publisher.key,
},
select: {
user: {
select: {
telegrams: {
select: {
chatId: true,
},
},
},
},
},
}
)
const chatIds = subscriptions
.map((sub) => sub.user.telegrams.map((tel) => tel.chatId))
.flat()
const shouldSendPush = chatIds.length > 0
const lastNoticeIndex = notices.findIndex(
(notice) => notice.title === this.getLastNoticeTitle(publisher)
)
if (lastNoticeIndex > 0 && shouldSendPush) {
for (let i = lastNoticeIndex - 1; i >= 0; i -= 1) {
const notice = notices[i]
// Send Telegram notifications
chatIds.forEach((chatId) => {
return telegramBot.sendMessage(
chatId,
`
새로운 <b>${publisher.name}</b> 공지사항이 올라왔습니다.
<b>${notice.title}</b>
${notice.noticeItemUrl}
`,
{ parse_mode: 'HTML' }
)
})
}
}
await page.close()
this.updateLastNotice(publisher, notices[0].title)
this.writeLastNoticeFile()
}
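  // Loads the publisher page, waits for pending XHRs, and extracts notice titles and
  // URLs using the publisher's selectors and builders.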
static async visit(
page: Page,
publisher: Publisher,
pageNumber?: number
): Promise<
{
title: string
noticeItemUrl: string
}[]
> {
const pendingXHR = new PendingXHR(page)
try {
await page.goto(publisher.url)
await pendingXHR.waitForAllXhrFinished()
await page.waitForSelector(publisher.noticeItemSelector)
} catch (err) {
throw new Error(err)
}
const bodyHtml = await page.$eval('body', (body) => body.innerHTML)
const { body } = new JSDOM(bodyHtml).window.document
const notices: {
title: string
noticeItemUrl: string
}[] = []
const noticeElms = body.querySelectorAll(publisher.noticeItemSelector)
for (const noticeElm of Array.from(noticeElms)) {
const title = publisher.titleBuilder(noticeElm)
const noticeItemUrl = publisher.urlBuilder
? publisher.urlBuilder(noticeElm)
: publisher.url
notices.push({
title,
noticeItemUrl,
})
}
return notices
}
}
| 23.769231 | 80 | 0.628964 |
ef5a29732a4fa4482ab45c0c316d007e1917ac38 | 5,202 | c | C | berkeley/blip-2.0/support/sdk/c/blip/lib6lowpan/lib6lowpan_frag.c | tinyos-io/tinyos-3.x-contrib | 3aaf036722a2afc0c0aad588459a5c3e00bd3c01 | [
"BSD-3-Clause",
"MIT"
] | 1 | 2020-02-28T20:35:09.000Z | 2020-02-28T20:35:09.000Z | berkeley/blip-2.0/support/sdk/c/blip/lib6lowpan/lib6lowpan_frag.c | tinyos-io/tinyos-3.x-contrib | 3aaf036722a2afc0c0aad588459a5c3e00bd3c01 | [
"BSD-3-Clause",
"MIT"
] | null | null | null | berkeley/blip-2.0/support/sdk/c/blip/lib6lowpan/lib6lowpan_frag.c | tinyos-io/tinyos-3.x-contrib | 3aaf036722a2afc0c0aad588459a5c3e00bd3c01 | [
"BSD-3-Clause",
"MIT"
] | null | null | null |
#include <stdint.h>
#include <string.h>
#include <stdlib.h>
#include "6lowpan.h"
#include "Ieee154.h"
#include "ip.h"
#include "lib6lowpan.h"
#include "nwbyte.h"
#include "ip_malloc.h"
#include "iovec.h"
int lowpan_recon_complete(struct lowpan_reconstruct *recon,
struct ip6_packet_headers *hdrs);
int lowpan_recon_start(struct ieee154_frame_addr *frame_addr,
struct lowpan_reconstruct *recon,
uint8_t *pkt, size_t len) {
uint8_t *unpack_point, *unpack_end;
struct packed_lowmsg msg;
msg.data = pkt;
msg.len = len;
msg.headers = getHeaderBitmap(&msg);
if (msg.headers == LOWMSG_NALP) return -1;
/* remove the 6lowpan headers from the payload */
unpack_point = getLowpanPayload(&msg);
len -= (unpack_point - pkt);
/* set up the reconstruction, or just fill in the packet length */
if (hasFrag1Header(&msg)) {
getFragDgramTag(&msg, &recon->r_tag);
getFragDgramSize(&msg, &recon->r_size);
} else {
recon->r_size = LIB6LOWPAN_MAX_LEN + LOWPAN_LINK_MTU;
}
recon->r_buf = malloc(recon->r_size);
if (!recon->r_buf) return -1;
memset(recon->r_buf, 0, recon->r_size);
/* unpack the first fragment */
unpack_end = lowpan_unpack_headers(recon->r_buf, recon->r_size,
frame_addr,
unpack_point, len);
if (!unpack_end) {
free(recon->r_buf);
return -1;
}
if (!hasFrag1Header(&msg)) {
recon->r_size = (unpack_end - recon->r_buf);
}
recon->r_bytes_rcvd = unpack_end - recon->r_buf;
/* done, updated all the fields */
/* reconstruction is complete if r_bytes_rcvd == r_size */
return 0;
}
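/* Append the payload of a FRAGN fragment to an in-progress reconstruction. */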
int lowpan_recon_add(struct lowpan_reconstruct *recon,
uint8_t *pkt, size_t len) {
struct packed_lowmsg msg;
uint8_t *buf;
msg.data = pkt;
msg.len = len;
msg.headers = getHeaderBitmap(&msg);
if (msg.headers == LOWMSG_NALP) return -1;
if (!hasFragNHeader(&msg)) {
return -1;
}
buf = getLowpanPayload(&msg);
len -= (buf - pkt);
if (recon->r_size < recon->r_bytes_rcvd + len) return -1;
/* just need to copy the new payload in and return */
memcpy(recon->r_buf + recon->r_bytes_rcvd, buf, len);
recon->r_bytes_rcvd += len;
return 0;
}
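/* Write the next outgoing fragment of `packet` into `frag`; returns the number of
   bytes produced, 0 once the packet has been fully emitted, or <0 on error. */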
int lowpan_frag_get(uint8_t *frag, size_t len,
struct ip6_packet *packet,
struct ieee154_frame_addr *frame,
struct lowpan_ctx *ctx) {
uint8_t *buf, *lowpan_buf, *ieee_buf = frag;
uint16_t extra_payload;
/* pack 802.15.4 */
buf = lowpan_buf = pack_ieee154_header(frag, len, frame);
if (ctx->offset == 0) {
int offset;
/* pack the IPv6 header */
buf = lowpan_pack_headers(packet, frame, buf, len - (buf - frag));
if (!buf) return -1;
/* pack the next headers */
offset = pack_nhc_chain(&buf, len - (buf - ieee_buf), packet);
if (offset < 0) return -2;
/* copy the rest of the payload into this fragment */
extra_payload = ntohs(packet->ip6_hdr.ip6_plen) - offset;
/* may need to fragment -- insert a FRAG1 header if so */
if (extra_payload > len - (buf - ieee_buf)) {
struct packed_lowmsg lowmsg;
memmove(lowpan_buf + LOWMSG_FRAG1_LEN,
lowpan_buf,
buf - lowpan_buf);
lowmsg.data = lowpan_buf;
lowmsg.len = LOWMSG_FRAG1_LEN;
lowmsg.headers = 0;
setupHeaders(&lowmsg, LOWMSG_FRAG1_HDR);
setFragDgramSize(&lowmsg, ntohs(packet->ip6_hdr.ip6_plen) + sizeof(struct ip6_hdr));
setFragDgramTag(&lowmsg, ctx->tag);
lowpan_buf += LOWMSG_FRAG1_LEN;
buf += LOWMSG_FRAG1_LEN;
extra_payload = len - (buf - ieee_buf);
extra_payload -= (extra_payload % 8);
}
if (iov_read(packet->ip6_data, offset, extra_payload, buf) != extra_payload) {
return -3;
}
ctx->offset = offset + extra_payload + sizeof(struct ip6_hdr);
return (buf - frag) + extra_payload;
} else {
struct packed_lowmsg lowmsg;
buf = lowpan_buf = pack_ieee154_header(frag, len, frame);
/* setup the FRAGN header */
lowmsg.data = lowpan_buf;
lowmsg.len = LOWMSG_FRAGN_LEN;
lowmsg.headers = 0;
setupHeaders(&lowmsg, LOWMSG_FRAGN_HDR);
if (setFragDgramSize(&lowmsg, ntohs(packet->ip6_hdr.ip6_plen) + sizeof(struct ip6_hdr)))
return -5;
if (setFragDgramTag(&lowmsg, ctx->tag))
return -6;
if (setFragDgramOffset(&lowmsg, ctx->offset / 8))
return -7;
buf += LOWMSG_FRAGN_LEN;
extra_payload = ntohs(packet->ip6_hdr.ip6_plen) + sizeof(struct ip6_hdr) - ctx->offset;
if (extra_payload > len - (buf - ieee_buf)) {
extra_payload = len - (buf - ieee_buf);
extra_payload -= (extra_payload % 8);
}
/* { */
/* int i; */
/* for (i = 0; i < extra_payload; i++) { */
/* buf[i] = i; */
/* } */
/* } */
if (iov_read(packet->ip6_data, ctx->offset - sizeof(struct ip6_hdr), extra_payload, buf) != extra_payload) {
return -4;
}
ctx->offset += extra_payload;
if (extra_payload == 0) return 0;
else return (lowpan_buf - ieee_buf) + LOWMSG_FRAGN_LEN + extra_payload;
}
}
| 28.9 | 112 | 0.621684 |
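The fragmentation entry points above are normally driven from a small transmit loop: lowpan_frag_get() is called repeatedly with the same lowpan_ctx until it returns 0 (datagram fully emitted) or a negative error code. The sketch below shows only that calling pattern, assuming the repository's lib6lowpan headers are on the include path; send_ipv6_packet(), radio_send() and datagram_tag are placeholder names for this illustration, not part of the library.

#include <stdint.h>
#include <stddef.h>
#include "6lowpan.h"
#include "Ieee154.h"
#include "ip.h"
#include "lib6lowpan.h"

/* Placeholder for the platform's link-layer send routine. */
extern int radio_send(const uint8_t *frame, size_t len);

/* Fragment one IPv6 datagram into 802.15.4 frames and hand each to the radio.
   Returns 0 once the whole datagram has been emitted, negative on error. */
int send_ipv6_packet(struct ip6_packet *pkt,
                     struct ieee154_frame_addr *frame_addr,
                     uint16_t datagram_tag) {
  struct lowpan_ctx ctx;
  uint8_t frame[LOWPAN_LINK_MTU];
  int len;

  ctx.tag = datagram_tag;  /* shared by every fragment of this datagram */
  ctx.offset = 0;          /* advanced by lowpan_frag_get() on each call */

  /* The first call packs the compressed headers (adding a FRAG1 header when the
     payload does not fit in one frame); later calls emit FRAGN fragments. */
  while ((len = lowpan_frag_get(frame, sizeof(frame), pkt, frame_addr, &ctx)) > 0) {
    if (radio_send(frame, (size_t)len) != 0)
      return -1;
  }
  return len;
}

On the receive side the mirror-image loop feeds lowpan_recon_start() with the first frame of a datagram and lowpan_recon_add() with each FRAGN frame, until recon.r_bytes_rcvd reaches recon.r_size.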
f35705b36964f57ed3218526b51d3244afdf6a24 | 70 | lua | Lua | modules/express/tests/test-express.lua | czanyou/node.lua | 1711425dd042ff5b6034a866653400fb00bede76 | [
"Apache-2.0"
] | 19 | 2016-05-04T12:22:12.000Z | 2021-11-04T17:36:44.000Z | modules/express/tests/test-express.lua | czanyou/node.lua | 1711425dd042ff5b6034a866653400fb00bede76 | [
"Apache-2.0"
] | 2 | 2015-11-01T12:48:13.000Z | 2018-10-25T09:14:41.000Z | modules/express/tests/test-express.lua | czanyou/node.lua | 1711425dd042ff5b6034a866653400fb00bede76 | [
"Apache-2.0"
] | 6 | 2016-03-30T15:21:06.000Z | 2020-10-27T06:48:48.000Z | local express = require('express')
local path = require('path')
| 23.333333 | 35 | 0.657143 |
da587cc55e36d7c38f24b64ca5e07d17d51434b5 | 1,962 | php | PHP | frontend/views/transactions/_search.php | ArtemBaranovsky/ewallet | 17d240fe55461ca30d7170b37d657c74d3894e65 | [
"BSD-3-Clause"
] | 2 | 2019-11-12T15:14:16.000Z | 2020-02-13T17:39:17.000Z | frontend/views/transactions/_search.php | ArtemBaranovsky/ewallet | 17d240fe55461ca30d7170b37d657c74d3894e65 | [
"BSD-3-Clause"
] | null | null | null | frontend/views/transactions/_search.php | ArtemBaranovsky/ewallet | 17d240fe55461ca30d7170b37d657c74d3894e65 | [
"BSD-3-Clause"
] | 2 | 2019-11-12T15:14:18.000Z | 2021-07-27T14:14:40.000Z | <?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
/* @var $this yii\web\View */
/* @var $model backend\models\TransactionsSearch */
/* @var $form yii\widgets\ActiveForm */
?>
<div class="transactions-search">
<?php //var_dump("<pre>", $model, "</pre>"); ?>
<?php //yii\helpers\VarDumper::dump($model, 10, true); ?>
<?php //yii\helpers\VarDumper::dump($dataProvider, 10, true); ?>
<?php $form = ActiveForm::begin([
'action' => ['index'],
'method' => 'get',
]); ?>
<?= $form->field($model, 'id') ?>
<?//= $form->field($model, 'sender_wallet_id') ?>
<?//= $form->field($model, 'recipient_wallet_id') ?>
<?= $form->field($model, 'sender_currency_amount') ?>
<?= $form->field($model, 'recipient_currency_amount') ?>
<?= $form->field($model, 'senderemail') ?>
<?= $form->field($model, 'recipientemail') ?>
<?php //echo $form->field($model, 'timestamp')->widget(\yii\widgets\MaskedInput::className(), [
// 'model' => $model,
// 'attribute' => 'date',
// 'name' => 'date',
// 'mask' => '99/99/9999',
//]); ?>
<?php //echo $form->field($model, "timestamp")->widget(DateControl::classname(), ['type' => DateControl::FORMAT_DATETIME, 'displayFormat' => 'dd/MM/yyyy', 'autoWidget' => false, 'widgetClass' => 'yii\\widgets\\MaskedInput', 'options' => ['mask' => '99/99/9999', 'options' => ['class' => 'form-control', 'placeholder' => 'Data nascimento...']]]) ?>
<?php echo $form->field($model, 'senderwalletcurrency') ?>
<?php echo $form->field($model, 'recipientwalletcurrency') ?>
<?php echo $form->field($model, 'senderwalletname') ?>
<?php echo $form->field($model, 'recipientwalletname') ?>
<div class="form-group">
<?= Html::submitButton('Search', ['class' => 'btn btn-primary']) ?>
<?= Html::resetButton('Reset', ['class' => 'btn btn-outline-secondary']) ?>
</div>
<?php ActiveForm::end(); ?>
</div>
| 35.035714 | 351 | 0.571865 |
8709087c7c0dc3db3a2519e6d456fc87e26d8952 | 1,019 | rs | Rust | feather/common/src/tick_loop.rs | PauMAVA/feather | f6735af879324a65b04ae176977ba7ac17e70f89 | [
"Apache-2.0"
] | 1,417 | 2019-11-03T14:03:40.000Z | 2022-03-31T23:56:58.000Z | feather/common/src/tick_loop.rs | PauMAVA/feather | f6735af879324a65b04ae176977ba7ac17e70f89 | [
"Apache-2.0"
] | 278 | 2019-11-06T22:22:33.000Z | 2022-03-12T06:01:02.000Z | feather/common/src/tick_loop.rs | PauMAVA/feather | f6735af879324a65b04ae176977ba7ac17e70f89 | [
"Apache-2.0"
] | 189 | 2019-11-03T14:11:34.000Z | 2022-03-21T20:31:36.000Z | use std::time::Instant;
use base::TICK_DURATION;
/// Utility to invoke a function in a tick loop, once
/// every 50ms.
pub struct TickLoop {
function: Box<dyn FnMut() -> bool>,
}
impl TickLoop {
/// Creates a `TickLoop`. The given `function` is called
/// each tick. Returning `true` from `function` causes the
/// tick loop to exit.
pub fn new(function: impl FnMut() -> bool + 'static) -> Self {
Self {
function: Box::new(function),
}
}
/// Runs the tick loop until the callback returns `true`.
pub fn run(mut self) {
loop {
let start = Instant::now();
let should_exit = (self.function)();
if should_exit {
return;
}
let elapsed = start.elapsed();
if elapsed > TICK_DURATION {
log::warn!("Tick took too long ({:?})", elapsed);
} else {
std::thread::sleep(TICK_DURATION - elapsed);
}
}
}
}
| 26.128205 | 66 | 0.523062 |
f46f1833988ed86b65add44bd159a51332bcd4c9 | 1,431 | cs | C# | iFactr.Compact/Menus/MenuButton.cs | Zebra/iFactr-NETCF | dbf30d9591e15fd70ef8230ee3a7492142cf39a3 | [
"MIT"
] | 6 | 2018-01-14T05:19:05.000Z | 2022-02-24T18:17:42.000Z | iFactr.Compact/Menus/MenuButton.cs | Zebra/iFactr-NETCF | dbf30d9591e15fd70ef8230ee3a7492142cf39a3 | [
"MIT"
] | null | null | null | iFactr.Compact/Menus/MenuButton.cs | Zebra/iFactr-NETCF | dbf30d9591e15fd70ef8230ee3a7492142cf39a3 | [
"MIT"
] | 5 | 2017-06-27T17:26:50.000Z | 2021-05-07T17:16:36.000Z | using System;
using System.Windows.Forms;
using iFactr.UI;
using iFactr.Core;
namespace iFactr.Compact
{
class MenuButton : MenuItem, IMenuButton
{
public event EventHandler Clicked;
public string Title
{
get { return Text; }
}
public string ImagePath
{
get { return null; }
set { }
}
public Link NavigationLink { get; set; }
public IPairable Pair
{
get { return _pair; }
set
{
if (_pair != null || value == null) return;
_pair = value;
_pair.Pair = this;
}
}
private IPairable _pair;
public MenuButton(string title)
{
Text = title;
}
protected override void OnClick(EventArgs e)
{
base.OnClick(e);
var click = Clicked;
if (click != null)
{
click(this, e);
}
else
{
CompactFactory.Navigate(NavigationLink, PaneManager.Instance.FromNavContext(PaneManager.Instance.TopmostPane).CurrentView);
}
}
public bool Equals(IMenuButton other)
{
var item = other as UI.MenuButton;
return item != null ? item.Equals(this) : ReferenceEquals(this, other);
}
}
} | 23.080645 | 139 | 0.487072 |
32169f04ae9d292e5280ddf0ca978196bb1ba0da | 266 | swift | Swift | CardStyleTableViewExample/PhotoCell.swift | teambition/CardStyleTableViewCell | d9d0179f555cd745b285baeb7d9ceca7dd995c9a | [
"MIT"
] | 33 | 2016-04-29T06:38:20.000Z | 2021-08-16T03:33:27.000Z | CardStyleTableViewExample/PhotoCell.swift | teambition/CardStyleTableViewCell | d9d0179f555cd745b285baeb7d9ceca7dd995c9a | [
"MIT"
] | 2 | 2020-02-14T13:17:02.000Z | 2020-04-10T08:48:03.000Z | CardStyleTableViewExample/PhotoCell.swift | teambition/CardStyleTableViewCell | d9d0179f555cd745b285baeb7d9ceca7dd995c9a | [
"MIT"
] | 9 | 2016-12-10T09:11:34.000Z | 2020-07-27T08:13:15.000Z | //
// PhotoCell.swift
// CardStyleTableViewExample
//
// Created by 洪鑫 on 16/1/20.
// Copyright © 2016 Teambition. All rights reserved.
//
import UIKit
class PhotoCell: UITableViewCell {
override func awakeFromNib() {
super.awakeFromNib()
}
}
| 16.625 | 54 | 0.672932 |
c66439f72b275b5d7dda40bc5ea351b65b78d99c | 304 | py | Python | Life2Coding/01-hello-world/main.py | dloperab/Python-GUI | ab71f14c7fbb011af0735f48f5196146de11ea23 | [
"MIT"
] | 2 | 2019-03-17T16:08:09.000Z | 2019-03-29T06:37:16.000Z | Life2Coding/01-hello-world/main.py | dloperab/OpenCV-GUI | ab71f14c7fbb011af0735f48f5196146de11ea23 | [
"MIT"
] | null | null | null | Life2Coding/01-hello-world/main.py | dloperab/OpenCV-GUI | ab71f14c7fbb011af0735f48f5196146de11ea23 | [
"MIT"
] | 2 | 2020-03-14T04:32:19.000Z | 2021-03-09T17:21:33.000Z | import sys
from PyQt5.QtWidgets import QDialog, QApplication
from PyQt5.uic import loadUi
class MainForm(QDialog):
def __init__(self):
super(MainForm, self).__init__()
loadUi('main-form.ui', self)
app = QApplication(sys.argv)
window = MainForm()
window.show()
sys.exit(app.exec_()) | 21.714286 | 49 | 0.713816 |
0dad6a23472f82102a2f5e334a234263fa5fe331 | 807 | cs | C# | Model/IEnumerableUtils.cs | chuongmep/RevitElementBipChecker | d471b2b12f57d0dda1a8b82cdf08d36e5b00e1ce | [
"MIT"
] | 21 | 2021-02-19T07:00:54.000Z | 2022-03-23T10:57:36.000Z | Model/IEnumerableUtils.cs | chuongmep/RevitElementBipChecker | d471b2b12f57d0dda1a8b82cdf08d36e5b00e1ce | [
"MIT"
] | null | null | null | Model/IEnumerableUtils.cs | chuongmep/RevitElementBipChecker | d471b2b12f57d0dda1a8b82cdf08d36e5b00e1ce | [
"MIT"
] | 6 | 2021-02-26T01:00:52.000Z | 2022-01-18T06:17:28.000Z | using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace RevitElementBipChecker.Model
{
public static class IEnumerableUtils
{
/// <summary>
/// Convert From T IEnumerable To ObservableCollection
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="source"></param>
/// <returns></returns>
public static ObservableCollection<T> ToObservableCollection<T>(this IEnumerable<T> source)
{
ObservableCollection<T> newSource = new ObservableCollection<T>();
foreach (T t in source)
{
newSource.Add(t);
}
return newSource;
}
}
}
| 26.9 | 99 | 0.607187 |
da48e56de261638bee65bde4582668ae9dbb2bef | 303 | php | PHP | src/Cli/CommandControllers/DatabaseCli.php | SandratraRM/mora | 46eac532766be51ee961494cf85e7c24a3716986 | [
"MIT"
] | 1 | 2019-01-22T06:20:11.000Z | 2019-01-22T06:20:11.000Z | src/Cli/CommandControllers/DatabaseCli.php | SandratraRM/mora-core | 46eac532766be51ee961494cf85e7c24a3716986 | [
"MIT"
] | null | null | null | src/Cli/CommandControllers/DatabaseCli.php | SandratraRM/mora-core | 46eac532766be51ee961494cf85e7c24a3716986 | [
"MIT"
] | null | null | null | <?php
namespace Mora\Core\Cli\CommandControllers;
use Mora\Core\Control\Controller;
class DatabaseCli extends Controller
{
public function index($params)
{
}
public function actionNotFound($actionName, $params)
{
}
public function init($params)
{
}
} | 12.625 | 56 | 0.643564 |
93bee6efd2a24495222ef6b855730a4da9c8595e | 677 | cs | C# | Src/MultiSafepay/Model/ShoppingCartItem.cs | narendrakongara1987/.Net | 8daacaf2188ab8a91604bb0c8b7eaa2538d02396 | [
"MIT"
] | 42 | 2015-05-29T11:38:58.000Z | 2022-02-20T06:48:17.000Z | Src/MultiSafepay/Model/ShoppingCartItem.cs | narendrakongara1987/.Net | 8daacaf2188ab8a91604bb0c8b7eaa2538d02396 | [
"MIT"
] | 16 | 2015-12-16T21:18:58.000Z | 2020-10-15T08:23:04.000Z | Src/MultiSafepay/Model/ShoppingCartItem.cs | narendrakongara1987/.Net | 8daacaf2188ab8a91604bb0c8b7eaa2538d02396 | [
"MIT"
] | 58 | 2015-09-02T14:04:17.000Z | 2022-03-02T03:38:45.000Z | using Newtonsoft.Json;
namespace MultiSafepay.Model
{
public class ShoppingCartItem : ShoppingCartItemObject
{
public ShoppingCartItem(string merchantItemId, string name, double unitPrice, int quantity, string currency)
{
MerchantItemId = merchantItemId;
Name = name;
UnitPrice = unitPrice;
Quantity = quantity;
Currency = currency;
}
public ShoppingCartItem(string name, double unitPrice, int quantity, string currency)
{
Name = name;
UnitPrice = unitPrice;
Quantity = quantity;
Currency = currency;
}
}
} | 28.208333 | 116 | 0.593796 |
a3320a79466cf03a051cdb736b715f53afdf5a2c | 827 | java | Java | generators/java-ext-apps/templates/java/pizza-demo/src/main/java/com/symphony/platformsolutions/pizza/bot/IMListenerImpl.java | ChristianSymphony/generator-symphony | 089fee0cc5af18d1c2bc14d9c6bf12f311a6359a | [
"MIT"
] | null | null | null | generators/java-ext-apps/templates/java/pizza-demo/src/main/java/com/symphony/platformsolutions/pizza/bot/IMListenerImpl.java | ChristianSymphony/generator-symphony | 089fee0cc5af18d1c2bc14d9c6bf12f311a6359a | [
"MIT"
] | null | null | null | generators/java-ext-apps/templates/java/pizza-demo/src/main/java/com/symphony/platformsolutions/pizza/bot/IMListenerImpl.java | ChristianSymphony/generator-symphony | 089fee0cc5af18d1c2bc14d9c6bf12f311a6359a | [
"MIT"
] | null | null | null | package com.symphony.platformsolutions.pizza.bot;
import clients.SymBotClient;
import listeners.IMListener;
import model.InboundMessage;
import model.OutboundMessage;
import model.Stream;
public class IMListenerImpl implements IMListener {
private SymBotClient botClient;
public IMListenerImpl(SymBotClient botClient) {
this.botClient = botClient;
}
public void onIMMessage(InboundMessage inboundMessage) {
OutboundMessage message;
if (inboundMessage.getMessageText().equalsIgnoreCase("/menu"))
message = PizzaController.getMenuMessage();
else
message = PizzaController.getHelpMessage();
this.botClient.getMessagesClient().sendMessage(inboundMessage.getStream().getStreamId(), message);
}
public void onIMCreated(Stream stream) {}
}
| 29.535714 | 106 | 0.736397 |
652fd2c29cc8274898f1735318be487c0aeb6eb9 | 2,883 | css | CSS | daywise_work/day13_PortfolioImprovements/portfolio/src/App.css | ameshalexk/100DaysOfCodeTracker-Amesh | b72556abd417f222eef50d188e843d62157d7657 | [
"MIT"
] | 1 | 2020-09-27T13:47:16.000Z | 2020-09-27T13:47:16.000Z | daywise_work/day13_PortfolioImprovements/portfolio/src/App.css | ameshalexk/100DaysOfCodeTracker-Amesh | b72556abd417f222eef50d188e843d62157d7657 | [
"MIT"
] | null | null | null | daywise_work/day13_PortfolioImprovements/portfolio/src/App.css | ameshalexk/100DaysOfCodeTracker-Amesh | b72556abd417f222eef50d188e843d62157d7657 | [
"MIT"
] | null | null | null | * {
box-sizing: border-box;
margin: 0;
padding: 0;
font-family: 'PT Sans', sans-serif;
}
.home,
.tapparel,
.coinstance,
.minesweeper,
.covidmapper,
.projects {
display: flex;
height: 90vh;
align-items: center;
justify-content: center;
font-size: 3rem;
}
.about {
display: flex;
height: 20vh;
align-items: center;
justify-content: center;
font-size: 3rem;
}
iframe {
/* width: 50%;
height: 100%; */
margin: 0 auto;
display: block;
height: 90vh;
}
.home {
background-position: center;
background-size: cover;
background-repeat: no-repeat;
color: #fff;
font-size: 100px;
}
.contact-us {
background-position: center;
background-size: cover;
background-repeat: no-repeat;
color: #fff;
font-size: 100px;
}
.mainpage {
display: flex;
flex-direction: column;
place-items: center;
padding: 10px;
}
.mainimage {
display: flex;
flex-direction: column;
place-items: center;
/* padding: 20px; */
height: 90vh;
justify-content:center
}
/* .mainhero {
display: grid;
grid-template-columns: repeat(2, 1fr);
grid-template-rows: 1fr;
grid-column-gap: 0px;
grid-row-gap: 0px;
} */
.mainimage2 {
display: flex;
flex-direction: column;
place-items: center;
/* padding: 20px; */
/* height: 50vh; */
width: 500px;
height : 500px;
margin: 130px;
}
.skillscontainer {
display: grid;
grid-template-columns: repeat(3, 1fr);
grid-template-rows: repeat(3, 1fr);
grid-column-gap: 80px;
grid-row-gap: 80px;
padding: 50px;
}
.mainpage h2 {
text-decoration: underline;
text-align: center;
padding-top: 50px;
}
.maintext {
background-color: lightsteelblue;
padding-left: 50px;
padding-right: 50px;
padding-bottom: 10px;
}
@media screen and (max-width: 960px) {
.skillscontainer {
display: grid;
grid-template-columns: repeat(3, 1fr);
grid-template-rows: repeat(3, 1fr);
grid-column-gap: 20px;
grid-row-gap: 20px;
padding: 20px;
}
.mainimage {
display: flex;
flex-direction: column;
place-items: center;
/* padding: 20px; */
height: 50vh;
justify-content:center
}
.about {
/* display: flex; */
/* height: 20vh; */
/* align-items: center;
justify-content: center;
font-size: 3rem; */
display: flex;
flex-direction: column;
place-items: center;
/* padding: 20px; */
height: 40vh;
width: auto;
font-size: 30px;
justify-content:center
}
iframe {
width: 100%;
height: 100%;
margin: 0 auto;
display: block;
height: 90vh;
}
}
| 17.796296 | 44 | 0.560874 |
a323a5890ebdcb6492f81a2baeee7d66de5f5127 | 1,679 | java | Java | admin-web/src/java/org/jppf/admin/web/health/HealthConstants.java | jppf-grid/JPPF | b98ee419e8d6bbf47ebb31e26ad4ef7c8f93c6e8 | [
"Apache-2.0"
] | 44 | 2018-09-13T02:31:00.000Z | 2022-03-24T01:18:49.000Z | admin-web/src/java/org/jppf/admin/web/health/HealthConstants.java | ParaSatellite/JPPF | b6a23dddb0393a8908c71b441fb214b148648cea | [
"Apache-2.0"
] | 18 | 2019-03-17T21:40:53.000Z | 2021-11-20T06:57:32.000Z | admin-web/src/java/org/jppf/admin/web/health/HealthConstants.java | ParaSatellite/JPPF | b6a23dddb0393a8908c71b441fb214b148648cea | [
"Apache-2.0"
] | 14 | 2019-07-02T09:26:50.000Z | 2022-01-12T07:39:57.000Z | /*
* JPPF.
* Copyright (C) 2005-2019 JPPF Team.
* http://www.jppf.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jppf.admin.web.health;
/**
*
* @author Laurent Cohen
*/
public class HealthConstants {
/**
* System gc action id.
*/
public static String GC_ACTION = "health.gc";
/**
* Get a thread dump action id.
*/
public static String THREAD_DUMP_ACTION = "health.threaddump";
/**
* Trigger heap dump action id.
*/
public static String HEAP_DUMP_ACTION = "health.heap_dump";
/**
* Set thresholds action id.
*/
public static String THRESHOLDS_ACTION = "health.thresholds";
/**
* Expand all action id.
*/
public static String EXPAND_ALL_ACTION = "health.expand";
/**
* Collapse action id.
*/
public static String COLLAPSE_ALL_ACTION = "health.collapse";
/**
* Select drivers action id.
*/
public static String SELECT_DRIVERS_ACTION = "health.select_drivers";
/**
* Select nodes action id.
*/
public static String SELECT_NODES_ACTION = "health.select_nodes";
/**
* Select all action id.
*/
public static String SELECT_ALL_ACTION = "health.select_all";
}
| 26.650794 | 75 | 0.687314 |
64e9fdea42765fa3d36d140d6ccefd66eb7ccdc9 | 132 | cql | SQL | test/data/Cql_for_Cassandra_random_sample_tweet_template.cql | SubhasisDutta/Data-Simulator | 0b8d4856f1d902c8173c99d6166aa757fbc8eac3 | [
"MIT"
] | null | null | null | test/data/Cql_for_Cassandra_random_sample_tweet_template.cql | SubhasisDutta/Data-Simulator | 0b8d4856f1d902c8173c99d6166aa757fbc8eac3 | [
"MIT"
] | null | null | null | test/data/Cql_for_Cassandra_random_sample_tweet_template.cql | SubhasisDutta/Data-Simulator | 0b8d4856f1d902c8173c99d6166aa757fbc8eac3 | [
"MIT"
] | null | null | null | CREATE TABLE "DataSimulator"."TweetTest" (
id uuid,
timestamp timestamp,
authorhandle varchar,
tweet text,
PRIMARY KEY (id)
);
| 16.5 | 42 | 0.734848 |
72fe23457f62cd4d7dc8b68266005929ff2dbcfb | 195 | sql | SQL | apps/server/prisma/migrations/20211115111109_cap_and_image_are_optionals/migration.sql | sairajchouhan/s2media | 396081e149dbe9d53b8811b97a3b319cccacaa95 | [
"MIT"
] | 4 | 2022-01-13T14:13:26.000Z | 2022-03-23T06:49:35.000Z | apps/server/prisma/migrations/20211115111109_cap_and_image_are_optionals/migration.sql | sairajchouhan/s2media | 396081e149dbe9d53b8811b97a3b319cccacaa95 | [
"MIT"
] | 3 | 2021-08-01T14:46:38.000Z | 2021-11-22T08:32:14.000Z | apps/server/prisma/migrations/20211115111109_cap_and_image_are_optionals/migration.sql | sairajchouhan/s2media | 396081e149dbe9d53b8811b97a3b319cccacaa95 | [
"MIT"
] | 2 | 2022-01-14T07:54:51.000Z | 2022-01-18T05:48:31.000Z | -- AlterTable
ALTER TABLE "Post" ALTER COLUMN "url" DROP NOT NULL,
ALTER COLUMN "imagePublicId" DROP NOT NULL;
-- RenameIndex
ALTER INDEX "Profile_userId_unique" RENAME TO "Profile_userId_key";
| 27.857143 | 67 | 0.779487 |
cd867718b7a2502157525748187d245be9daf0f6 | 316 | cs | C# | Assets/Scripts/GameFlow/Utils/Ui/Tutorial/UiOverlayTutorialCommonObject.cs | TheProxor/code-samples-from-pg | e9930f7fe4cf13e082556362db8ccf2172f70906 | [
"Unlicense"
] | null | null | null | Assets/Scripts/GameFlow/Utils/Ui/Tutorial/UiOverlayTutorialCommonObject.cs | TheProxor/code-samples-from-pg | e9930f7fe4cf13e082556362db8ccf2172f70906 | [
"Unlicense"
] | null | null | null | Assets/Scripts/GameFlow/Utils/Ui/Tutorial/UiOverlayTutorialCommonObject.cs | TheProxor/code-samples-from-pg | e9930f7fe4cf13e082556362db8ccf2172f70906 | [
"Unlicense"
] | 2 | 2021-04-14T12:53:52.000Z | 2021-04-24T06:59:52.000Z | using UnityEngine;
namespace Drawmasters.Ui
{
public class UiOverlayTutorialCommonObject : IUiOverlayTutorialObject
{
public UiOverlayTutorialCommonObject(GameObject _gameObject) =>
OverlayTutorialObject = _gameObject;
public GameObject OverlayTutorialObject { get; }
}
}
| 21.066667 | 73 | 0.727848 |
7af09436c74e1572797f8492ac9e4ef7047f9a1a | 325 | cs | C# | src/OrchardCore/OrchardCore.Recipes.Abstractions/Models/ConfigurationContext.cs | protoncm/OrchardCore | 9105b3f23f48badad576973d6716d5fe3fd6d9cf | [
"BSD-3-Clause"
] | 3 | 2018-10-31T14:07:54.000Z | 2019-04-19T23:00:58.000Z | src/OrchardCore/OrchardCore.Recipes.Abstractions/Models/ConfigurationContext.cs | protoncm/OrchardCore | 9105b3f23f48badad576973d6716d5fe3fd6d9cf | [
"BSD-3-Clause"
] | 8 | 2020-07-19T16:45:38.000Z | 2021-05-11T04:43:20.000Z | src/OrchardCore/OrchardCore.Recipes.Abstractions/Models/ConfigurationContext.cs | protoncm/OrchardCore | 9105b3f23f48badad576973d6716d5fe3fd6d9cf | [
"BSD-3-Clause"
] | 6 | 2020-01-16T15:30:37.000Z | 2020-01-17T15:10:06.000Z | using Newtonsoft.Json.Linq;
namespace OrchardCore.Recipes.Models
{
public class ConfigurationContext
{
protected ConfigurationContext(JObject configurationElement)
{
ConfigurationElement = configurationElement;
}
public JObject ConfigurationElement { get; set; }
}
} | 23.214286 | 68 | 0.683077 |
496ec220de8eda50c62fe56b7394d4fea44824d9 | 2,270 | py | Python | onap_data_provider/resources/owning_entity_resource.py | onap/integration-data-provider | 0565394ecbd96730bf982909693514ab88703708 | [
"Apache-2.0"
] | null | null | null | onap_data_provider/resources/owning_entity_resource.py | onap/integration-data-provider | 0565394ecbd96730bf982909693514ab88703708 | [
"Apache-2.0"
] | null | null | null | onap_data_provider/resources/owning_entity_resource.py | onap/integration-data-provider | 0565394ecbd96730bf982909693514ab88703708 | [
"Apache-2.0"
] | null | null | null | """Owning entity resource module."""
"""
Copyright 2021 Deutsche Telekom AG
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any, Dict, Optional
from onapsdk.aai.business import OwningEntity # type: ignore
from onapsdk.exceptions import ResourceNotFound # type: ignore
from .resource import Resource
class OwningEntityResource(Resource):
"""Owning entity resource class.
    Creates A&AI owning entity.
"""
def __init__(self, data: Dict[str, Any]) -> None:
"""Initialize line of business resource.
Args:
data (Dict[str, Any]): Data needed to create resource.
"""
super().__init__(data)
self._owning_entity: Optional[OwningEntity] = None
def create(self) -> None:
"""Create line of business resource."""
logging.debug(f"Create Owning entity {self.data['name']}")
if not self.exists:
self._owning_entity = OwningEntity.create(self.data["name"])
@property
def exists(self) -> bool:
"""Determine if resource already exists or not.
Returns:
bool: True if object exists, False otherwise
"""
return bool(self.owning_entity)
@property
def owning_entity(self) -> OwningEntity:
"""Owning entity property.
Owning entity which is represented by the data provided by user.
Returns:
OwningEntity: Owning entity object
"""
if not self._owning_entity:
try:
self._owning_entity = OwningEntity.get_by_owning_entity_name(
self.data["name"]
)
except ResourceNotFound:
return None
return self._owning_entity
| 29.868421 | 77 | 0.650661 |
c6685f44d8bf347a1349bf04aaa44940bcb0a997 | 13,194 | swift | Swift | SiliconLabsApp/Models/GattConfigurator/SILGattConfiguratorServiceHelper.swift | antam8314/EFRConnect-ios | a8aca8b82335980b7eecc017deec07fe950f7963 | [
"Apache-2.0"
] | null | null | null | SiliconLabsApp/Models/GattConfigurator/SILGattConfiguratorServiceHelper.swift | antam8314/EFRConnect-ios | a8aca8b82335980b7eecc017deec07fe950f7963 | [
"Apache-2.0"
] | null | null | null | SiliconLabsApp/Models/GattConfigurator/SILGattConfiguratorServiceHelper.swift | antam8314/EFRConnect-ios | a8aca8b82335980b7eecc017deec07fe950f7963 | [
"Apache-2.0"
] | null | null | null | //
// SILGattConfiguratorServiceHelper.swift
// BlueGecko
//
// Created by Grzegorz Janosz on 09/03/2021.
// Copyright © 2021 SiliconLabs. All rights reserved.
//
import Foundation
protocol SILGattConfiguratorServiceHelperType : class {
var configuration: SILGattConfigurationEntity! { get set }
init(configuration: SILGattConfigurationEntity)
}
class SILGattConfiguratorServiceHelper: SILGattConfiguratorServiceHelperType {
var configuration: SILGattConfigurationEntity!
var services: [CBMutableService] = []
var advertisementData: [String: Any] = [:]
private var characteristicMap: [CBUUID: [CBUUID: CBMutableCharacteristic]] = [:]
private var repository = SILGattConfigurationRepository.shared
required init(configuration: SILGattConfigurationEntity) {
self.configuration = SILGattConfigurationEntity(value: configuration)
createServices()
createAdvertisementData()
}
private func createAdvertisementData() {
var advertisementData: [String: Any] = [:]
advertisementData[CBAdvertisementDataLocalNameKey] = configuration.name
var uuids: [CBUUID] = []
for service in services {
uuids.append(service.uuid)
}
advertisementData[CBAdvertisementDataServiceUUIDsKey] = uuids
self.advertisementData = advertisementData
}
private func createServices() {
var services: [CBMutableService] = []
for service in configuration.services {
let cbuuid = CBUUID(string: service.cbuuidString)
let newService = CBMutableService(type: cbuuid, primary: service.isPrimary)
newService.characteristics = createCharacteristics(from: service)
debugPrint("created Service: ", newService )
services.append(newService)
}
self.services = services
}
private func createCharacteristics(from service: SILGattConfigurationServiceEntity) -> [CBMutableCharacteristic] {
let serviceCbuuid = CBUUID(string: service.cbuuidString)
self.characteristicMap[serviceCbuuid] = [:]
var characteristics: [CBMutableCharacteristic] = []
for characteristic in service.characteristics {
let characteristicCbuuid = CBUUID(string: characteristic.cbuuidString)
let result = createPropertiesAndPermissions(from: characteristic.properties)
let properties = result.0
let permissions = result.1
let newCharacteristic = CBMutableCharacteristic(type: characteristicCbuuid, properties: properties, value: nil, permissions: permissions)
newCharacteristic.descriptors = createDescriptors(fromCharateristic: characteristic)
characteristics.append(newCharacteristic)
self.characteristicMap[serviceCbuuid]?[characteristicCbuuid] = newCharacteristic
}
return characteristics
}
private func createPropertiesAndPermissions(from properties: [SILGattConfigurationProperty]) -> (CBCharacteristicProperties, CBAttributePermissions) {
var _properties: CBCharacteristicProperties = []
var _permissions: CBAttributePermissions = []
for property in properties {
switch property.type {
case .read:
_properties.insert(.read)
if property.permission == .none {
_permissions.insert(.readable)
} else {
_permissions.insert(.readEncryptionRequired)
}
case .write:
_properties.insert(.write)
if property.permission == .none {
_permissions.insert(.writeable)
} else {
_permissions.insert(.writeEncryptionRequired)
}
case .writeWithoutResponse:
_properties.insert(.writeWithoutResponse)
if property.permission == .none {
_permissions.insert(.writeable)
} else {
_permissions.insert(.writeEncryptionRequired)
}
case .notify:
if property.permission == .none {
_properties.insert(.notify)
} else {
_properties.insert(.notifyEncryptionRequired)
}
case .indicate:
if property.permission == .none {
_properties.insert(.indicate)
} else {
_properties.insert(.indicateEncryptionRequired)
}
}
}
return (_properties, _permissions)
}
func createDescriptors(fromCharateristic characteristic: SILGattConfigurationCharacteristicEntity) -> [CBMutableDescriptor] {
var descriptors: [CBMutableDescriptor] = []
for descriptor in characteristic.descriptors {
if descriptor.cbuuidString == "2904", descriptor.initialValueType == .hex, let value = descriptor.initialValue {
let cbuuid = CBUUID(string: descriptor.cbuuidString)
descriptors.append(CBMutableDescriptor(type: cbuuid, value: Data(hexString: value)))
} else if descriptor.cbuuidString == "2901", descriptor.initialValueType == .text, let value = descriptor.initialValue {
let cbuuid = CBUUID(string: descriptor.cbuuidString)
descriptors.append(CBMutableDescriptor(type: cbuuid, value: value))
} else if isUUID128Right(uuid: descriptor.cbuuidString) {
let cbuuid = CBUUID(string: descriptor.cbuuidString)
switch descriptor.initialValueType {
case .none:
continue
case .text:
descriptors.append(CBMutableDescriptor(type: cbuuid, value: descriptor.initialValue!.data(using: .utf8)))
case .hex:
descriptors.append(CBMutableDescriptor(type: cbuuid, value: Data(hexString: descriptor.initialValue!)))
}
}
}
return descriptors
}
private func isUUID128Right(uuid: String) -> Bool {
let hexRegex = "[0-9a-f]"
let uuid128Regex = try! NSRegularExpression(pattern: "\(hexRegex){8}-\(hexRegex){4}-\(hexRegex){4}-\(hexRegex){4}-\(hexRegex){12}")
return checkRegex(regex: uuid128Regex, text: uuid)
}
private func checkRegex(regex: NSRegularExpression, text: String) -> Bool {
let lowercaseText = text.lowercased()
let textRange = NSRange(location: 0, length: lowercaseText.utf16.count)
if regex.firstMatch(in: lowercaseText, options: [], range: textRange) != nil {
return true
}
return false
}
func setCharacteristicValues() {
for service in configuration.services {
let serviceCbuuid = CBUUID(string: service.cbuuidString)
for characteristic in service.characteristics {
if characteristic.initialValueType == .none {
continue
}
let characteristicCbuuid = CBUUID(string: characteristic.cbuuidString)
let mutableCharacteristic = self.characteristicMap[serviceCbuuid]?[characteristicCbuuid]
setInitialValue(forCharacteristic: mutableCharacteristic, entity: characteristic)
}
}
}
private func setInitialValue(forCharacteristic characteristic: CBMutableCharacteristic?, entity: SILGattConfigurationCharacteristicEntity) {
let value = entity.initialValue!
if entity.initialValueType == .hex {
characteristic?.value = Data(hexString: value)
} else {
characteristic?.value = value.data(using: .ascii)
}
}
func peripheralManager(_ peripheral: CBPeripheralManager, didReceiveRead request: CBATTRequest) {
if let service = request.characteristic.service, let characteristic = self.characteristicMap[service.uuid]?[request.characteristic.uuid], let value = characteristic.value {
if request.offset > characteristic.value?.count ?? 0 {
peripheral.respond(to: request, withResult: .invalidOffset)
return
}
let range = Range(NSRange(location: request.offset, length: value.count - request.offset))!
request.value = value.subdata(in: range)
peripheral.respond(to: request, withResult: .success)
print(characteristic.value?.hexString ?? "none")
return
}
peripheral.respond(to: request, withResult: .attributeNotFound)
}
func peripheralManager(_ peripheral: CBPeripheralManager, didReceiveWrite requests: [CBATTRequest]) {
for request in requests {
if let service = request.characteristic.service, let characteristic = self.characteristicMap[service.uuid]?[request.characteristic.uuid] {
if request.offset > characteristic.value?.count ?? 0 {
peripheral.respond(to: request, withResult: .invalidOffset)
return
}
characteristic.value = request.value
} else {
peripheral.respond(to: request, withResult: .attributeNotFound)
return
}
}
peripheral.respond(to: requests.first!, withResult: .success)
}
func peripheralManager(_ peripheral: CBPeripheralManager, central: CBCentral, didSubscribeTo characteristic: CBCharacteristic) {
if let service = characteristic.service, let characteristic = self.characteristicMap[service.uuid]?[characteristic.uuid] {
if let value = characteristic.value {
peripheral.updateValue(value, for: characteristic, onSubscribedCentrals: [central])
}
print("Subscribed centrals of characteristic after subscription ", characteristic.subscribedCentrals ?? "")
}
}
func peripheralManager(_ peripheral: CBPeripheralManager, central: CBCentral, didUnsubscribeFrom characteristic: CBCharacteristic) {
if let service = characteristic.service, let characteristic = self.characteristicMap[service.uuid]?[characteristic.uuid] {
print("Subscribed centrals of characteristic after unsubscription ", characteristic.subscribedCentrals ?? "")
}
}
func writeToLocalCharacteristic(data: Data, service: CBService, characteristic: CBCharacteristic) {
if let characteristic = self.characteristicMap[service.uuid]?[characteristic.uuid] {
characteristic.value = data
updateInitialValue(data: data, service: service, characteristic: characteristic)
print("Writing to local characteristic value")
}
}
func updateLocalCharacteristicValue(peripheral: CBPeripheralManager?, data: Data, service: CBService, characteristic: CBCharacteristic) {
if let characteristic = self.characteristicMap[service.uuid]?[characteristic.uuid], let peripheral = peripheral {
characteristic.value = data
peripheral.updateValue(data, for: characteristic, onSubscribedCentrals: nil)
updateInitialValue(data: data, service: service, characteristic: characteristic)
print("Updating local characteristic value")
}
}
private func updateInitialValue(data: Data, service: CBService, characteristic: CBCharacteristic) {
if let serviceEntity = configuration.services.first(where: { serviceEntity in
return CBUUID(string: serviceEntity.cbuuidString) == service.uuid
}) {
if let characteristicEntity = serviceEntity.characteristics.first(where: { characteristicEntity in
return CBUUID(string: characteristicEntity.cbuuidString) == characteristic.uuid
}) {
let characteristicEntity = SILGattConfigurationCharacteristicEntity(value: characteristicEntity)
if let dataString = String(data: data, encoding: .utf8) {
characteristicEntity.initialValue = dataString
characteristicEntity.initialValueType = .text
} else {
characteristicEntity.initialValue = data.hexString
characteristicEntity.initialValueType = .hex
}
repository.update(characteristic: characteristicEntity)
}
}
}
}
extension Data {
private static let hexRegex = try! NSRegularExpression(pattern: "^([a-fA-F0-9][a-fA-F0-9])*$", options: [])
init?(hexString: String) {
if Data.hexRegex.matches(in: hexString, range: NSMakeRange(0, hexString.count)).isEmpty {
return nil
}
let chars = Array(hexString)
let bytes: [UInt8] =
stride(from: 0, to: chars.count, by: 2)
.map {UInt8(String([chars[$0], chars[$0+1]]), radix: 16)}
.compactMap{$0}
self = Data(bytes)
}
var hexString: String {
return map { String(format: "%02hhx", $0) }.joined()
}
}
| 46.621908 | 180 | 0.637335 |
05d38363d3312f9f98526719370f057a55b7b829 | 402 | dart | Dart | widget_test/shared/navigation.dart | tmcmaster/scaffold_widget | 066a0c768402fcd342cd8d24525298ea50813454 | [
"MIT"
] | null | null | null | widget_test/shared/navigation.dart | tmcmaster/scaffold_widget | 066a0c768402fcd342cd8d24525298ea50813454 | [
"MIT"
] | null | null | null | widget_test/shared/navigation.dart | tmcmaster/scaffold_widget | 066a0c768402fcd342cd8d24525298ea50813454 | [
"MIT"
] | null | null | null | import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
class Navigation {
static final items = [
BottomNavigationBarItem(
icon: Icon(Icons.home),
label: 'Home',
),
BottomNavigationBarItem(
icon: Icon(Icons.mail),
label: 'Messages',
),
BottomNavigationBarItem(
icon: Icon(Icons.person),
label: 'Profile',
)
];
}
| 20.1 | 40 | 0.626866 |
a9e3411edf3b9b6e5bbbbfe0e3d298306f607477 | 273 | php | PHP | resources/views/langpartials/audio_de.blade.php | tripats/kdonline | a3854b1d045d7ebd0d38ca61c9f20cb5f4d6c837 | [
"MIT"
] | null | null | null | resources/views/langpartials/audio_de.blade.php | tripats/kdonline | a3854b1d045d7ebd0d38ca61c9f20cb5f4d6c837 | [
"MIT"
] | null | null | null | resources/views/langpartials/audio_de.blade.php | tripats/kdonline | a3854b1d045d7ebd0d38ca61c9f20cb5f4d6c837 | [
"MIT"
] | null | null | null | <div class="alert alert-info">
<h3>
Audiodateien bitte nur im Standardformat MPEG-1/2 Audio Layer 3, besser bekannt als *.MP3 hochladen.
</h3>
Maximale Dateigröße = 8 MB
Es sind Maximal {{Config::get('application.max_audio_count')}} Dateien pro Bewerbung möglich.
</div>
| 30.333333 | 101 | 0.750916 |
eba4dffbf6076c68d7154801be33459309216870 | 194 | sql | SQL | src/UNStatistic/UNStatistic.DB/dm/views/vw_DimGeography.sql | AndreyQC/UN-statistic | dee3640426855a52d42f36f1bd3dc9b42757b372 | [
"MIT"
] | 1 | 2021-12-18T13:16:01.000Z | 2021-12-18T13:16:01.000Z | src/UNStatistic/UNStatistic.DB/dm/views/vw_DimGeography.sql | AndreyQC/UN-statistic | dee3640426855a52d42f36f1bd3dc9b42757b372 | [
"MIT"
] | 2 | 2020-02-09T17:06:21.000Z | 2020-02-13T09:39:03.000Z | src/UNStatistic/UNStatistic.DB/dm/views/vw_DimGeography.sql | AndreyQC/UN-statistic | dee3640426855a52d42f36f1bd3dc9b42757b372 | [
"MIT"
] | 1 | 2021-12-27T09:34:26.000Z | 2021-12-27T09:34:26.000Z | CREATE VIEW [dm].[vw_DimGeography]
AS
SELECT
[DimGeographyKey]
,[Region]
,[SubRegion]
,[Country]
,[CountryCode]
FROM [dwh].[DimGeography]
| 19.4 | 35 | 0.525773 |
2779004a832a6391a5c5a86e8c2ebd95b38ed38a | 1,255 | kt | Kotlin | generator/graphql-kotlin-schema-generator/src/main/kotlin/com/expediagroup/graphql/generator/execution/PropertyDataFetcher.kt | majerm/graphql-kotlin | b5faccbda2a17fac63a0c79ca5bc3f95d905999f | [
"Apache-2.0"
] | 1,235 | 2019-09-09T16:44:44.000Z | 2022-03-30T11:11:18.000Z | generator/graphql-kotlin-schema-generator/src/main/kotlin/com/expediagroup/graphql/generator/execution/PropertyDataFetcher.kt | majerm/graphql-kotlin | b5faccbda2a17fac63a0c79ca5bc3f95d905999f | [
"Apache-2.0"
] | 628 | 2019-09-06T13:58:07.000Z | 2022-03-30T12:46:56.000Z | generator/graphql-kotlin-schema-generator/src/main/kotlin/com/expediagroup/graphql/generator/execution/PropertyDataFetcher.kt | majerm/graphql-kotlin | b5faccbda2a17fac63a0c79ca5bc3f95d905999f | [
"Apache-2.0"
] | 260 | 2019-09-06T13:50:15.000Z | 2022-03-21T16:13:53.000Z | /*
* Copyright 2021 Expedia, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.expediagroup.graphql.generator.execution
import graphql.schema.DataFetcher
import graphql.schema.DataFetchingEnvironment
import kotlin.reflect.KProperty
/**
* Property [DataFetcher] that directly invokes underlying property getter.
*
* @param propertyGetter Kotlin property getter that will be invoked to resolve a field
*/
class PropertyDataFetcher(private val propertyGetter: KProperty.Getter<*>) : DataFetcher<Any?> {
/**
* Invokes target getter function.
*/
override fun get(environment: DataFetchingEnvironment): Any? = environment.getSource<Any?>()?.let { instance ->
propertyGetter.call(instance)
}
}
| 33.918919 | 115 | 0.74502 |
cd5115240227833976d3be49cc454c5a6923e363 | 5,502 | cs | C# | Tests/TestingTests.cs | worldbeater/CoreRPC | 7d3dba28119f3514d472b0348742fd07417117a5 | [
"MIT"
] | null | null | null | Tests/TestingTests.cs | worldbeater/CoreRPC | 7d3dba28119f3514d472b0348742fd07417117a5 | [
"MIT"
] | null | null | null | Tests/TestingTests.cs | worldbeater/CoreRPC | 7d3dba28119f3514d472b0348742fd07417117a5 | [
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using CoreRPC.AspNetCore;
using CoreRPC.Testing;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Hosting.Server.Features;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Xunit;
namespace Tests
{
public class TestingTests
{
public class InjectableService
{
public int Sum(int a, int b) => a + b;
public int Multiply(int a, int b) => a * b;
}
[RegisterRpc]
public class AnonComputeRpc
{
private readonly InjectableService _service;
public AnonComputeRpc(InjectableService service) => _service = service;
public int Sum(int a, int b) => _service.Sum(a, b);
public int Multiply(int a, int b) => _service.Multiply(a, b);
}
[RegisterRpc]
public class AnonGreeterRpc
{
public string Greet(string name) => $"Hello, {name}!";
}
[RegisterRpc]
public class SecureComputeRpc : IHttpContextAwareRpc
{
private const string SecureToken = "foobar";
private readonly InjectableService _service;
public SecureComputeRpc(InjectableService service) => _service = service;
public int Sum(int a, int b) => _service.Sum(a, b);
public int Multiply(int a, int b) => _service.Multiply(a, b);
Task<object> IHttpContextAwareRpc.OnExecuteRpcCall(HttpContext context, Func<Task<object>> action)
{
var header = context.Request.Headers["X-Auth"].FirstOrDefault();
if (header != null && header == SecureToken)
return action();
context.Response.StatusCode = 401;
return Task.FromResult((object) new { Error = "Not authorized!" });
}
}
public class Startup
{
public void ConfigureServices(IServiceCollection services) => services.AddSingleton<InjectableService>();
public void Configure(IApplicationBuilder app) =>
app.UseCoreRpc("/rpc", options => options.RpcTypeResolver = () => new[]
{
typeof(SecureComputeRpc),
typeof(AnonComputeRpc),
typeof(AnonGreeterRpc),
});
}
// This is a sample helper class used for writing unit tests.
public sealed class AnonRpcList : RpcListBase
{
public AnonRpcList(string uri) : base(uri) { }
public IRpcExec<AnonComputeRpc> Compute => Get<AnonComputeRpc>();
public IRpcExec<AnonGreeterRpc> Greeter => Get<AnonGreeterRpc>();
}
// This is a sample helper class used for writing unit tests for authorized RPCs.
public sealed class SecureRpcList : RpcListBase
{
public SecureRpcList(string uri) : base(uri, new Dictionary<string, string> {["X-Auth"] = "foobar"}) { }
public IRpcExec<SecureComputeRpc> Compute => Get<SecureComputeRpc>();
}
// This RPC will unable to access secure APIs, because no header is provided.
public sealed class InSecureRpcList : RpcListBase
{
public InSecureRpcList(string uri) : base(uri) { }
public IRpcExec<SecureComputeRpc> Compute => Get<SecureComputeRpc>();
}
[Theory]
[InlineData(1, 1, 2, 1)]
[InlineData(2, 2, 4, 4)]
[InlineData(3, 2, 5, 6)]
public async Task ShouldCallRemoteProceduresWithoutInterfaceDefinition(int a, int b, int sum, int product)
{
var host = new WebHostBuilder()
.UseKestrel()
.UseFreePort()
.UseStartup<Startup>()
.Build();
await host.StartAsync();
var addresses = host.ServerFeatures.Get<IServerAddressesFeature>();
var address = addresses.Addresses.First().TrimEnd('/') + "/rpc";
var anon = new AnonRpcList(address);
Assert.Equal("Hello, John!", anon.Greeter.Call(api => api.Greet("John")));
Assert.Equal(sum, anon.Compute.Call(api => api.Sum(a, b)));
Assert.Equal(product, anon.Compute.Call(api => api.Multiply(a, b)));
var secure = new SecureRpcList(address);
Assert.Equal(sum, secure.Compute.Call(api => api.Sum(a, b)));
Assert.Equal(product, secure.Compute.Call(api => api.Multiply(a, b)));
}
[Fact]
public async Task InSecureProcedureCallsShouldFail()
{
var host = new WebHostBuilder()
.UseKestrel()
.UseFreePort()
.UseStartup<Startup>()
.Build();
await host.StartAsync();
var addresses = host.ServerFeatures.Get<IServerAddressesFeature>();
var address = addresses.Addresses.First().TrimEnd('/') + "/rpc";
var insecure = new InSecureRpcList(address);
Assert.ThrowsAny<Exception>(() => insecure.Compute.Call(api => api.Sum(2, 2)));
Assert.ThrowsAny<Exception>(() => insecure.Compute.Call(api => api.Multiply(2, 2)));
}
}
} | 37.428571 | 118 | 0.569975 |
b0a8026693675c2a22565d6daa5748a1dba33799 | 269 | py | Python | tests/test_import.py | mace-space/histLM | 3f6460f6fad703feb99e48f244396f8ef007c582 | [
"MIT"
] | 6 | 2021-05-24T12:40:30.000Z | 2022-02-09T11:04:07.000Z | tests/test_import.py | mace-space/histLM | 3f6460f6fad703feb99e48f244396f8ef007c582 | [
"MIT"
] | 1 | 2021-09-30T09:11:15.000Z | 2021-09-30T09:11:15.000Z | tests/test_import.py | mace-space/histLM | 3f6460f6fad703feb99e48f244396f8ef007c582 | [
"MIT"
] | 19 | 2021-06-03T12:25:42.000Z | 2021-07-30T13:54:21.000Z | import pytest
def test_import():
from transformers import pipeline
from flair.embeddings import FlairEmbeddings
from flair.data import Sentence
import flair, torch
from gensim.models import FastText
from gensim.models import Word2Vec
| 16.8125 | 48 | 0.739777 |
70b31b47e1f3c138ff8b456b56aaf02884a76073 | 3,553 | lua | Lua | CoreScripts/Modules/PlayerList/Components/PlayerListApp.spec.lua | jackrestaki/Core-Scripts | 31e68fd04caa585a0a52889aa2efda37ced4203e | [
"Apache-2.0"
] | null | null | null | CoreScripts/Modules/PlayerList/Components/PlayerListApp.spec.lua | jackrestaki/Core-Scripts | 31e68fd04caa585a0a52889aa2efda37ced4203e | [
"Apache-2.0"
] | null | null | null | CoreScripts/Modules/PlayerList/Components/PlayerListApp.spec.lua | jackrestaki/Core-Scripts | 31e68fd04caa585a0a52889aa2efda37ced4203e | [
"Apache-2.0"
] | null | null | null | return function()
local CoreGui = game:GetService("CoreGui")
local CorePackages = game:GetService("CorePackages")
local RobloxReplicatedStorage = game:GetService("RobloxReplicatedStorage")
local Roact = require(CorePackages.Roact)
local RoactRodux = require(CorePackages.RoactRodux)
local Rodux = require(CorePackages.Rodux)
local UIBlox = require(CorePackages.UIBlox)
local Flags = script.Parent.Parent.Parent.Flags
local GetFFlagRemoveInGameFollowingEvents = require(Flags.GetFFlagRemoveInGameFollowingEvents)
local PlayerList = script.Parent.Parent
local Reducers = PlayerList.Reducers
local Reducer = require(Reducers.Reducer)
local Connection = PlayerList.Components.Connection
local LayoutValues = require(Connection.LayoutValues)
local LayoutValuesProvider = LayoutValues.Provider
local CreateLayoutValues = require(PlayerList.CreateLayoutValues)
local Actions = PlayerList.Actions
local SetTenFootInterface = require(Actions.SetTenFootInterface)
--Create dummy events in RobloxReplicatedStorage:
local NewPlayerGroupDetails = Instance.new("RemoteEvent")
NewPlayerGroupDetails.Name = "NewPlayerGroupDetails"
NewPlayerGroupDetails.Parent = RobloxReplicatedStorage
local FollowRelationshipChanged
local GetFollowRelationships
local NewFollower
if not GetFFlagRemoveInGameFollowingEvents() then
FollowRelationshipChanged = Instance.new("RemoteEvent")
FollowRelationshipChanged.Name = "FollowRelationshipChanged"
FollowRelationshipChanged.Parent = RobloxReplicatedStorage
GetFollowRelationships = Instance.new("RemoteFunction")
GetFollowRelationships.Name = "GetFollowRelationships"
GetFollowRelationships.Parent = RobloxReplicatedStorage
NewFollower = Instance.new("RemoteEvent")
NewFollower.Name = "NewFollower"
NewFollower.Parent = RobloxReplicatedStorage
end
local RobloxGui = CoreGui:WaitForChild("RobloxGui")
local SendNotificationInfo = Instance.new("BindableEvent")
SendNotificationInfo.Name = "SendNotificationInfo"
SendNotificationInfo.Parent = RobloxGui
local PlayerListApp = require(script.Parent.PlayerListApp)
local AppDarkTheme = require(CorePackages.AppTempCommon.LuaApp.Style.Themes.DarkTheme)
local AppFont = require(CorePackages.AppTempCommon.LuaApp.Style.Fonts.Gotham)
local appStyle = {
Theme = AppDarkTheme,
Font = AppFont,
}
it("should create and destroy without errors", function()
local element = Roact.createElement(RoactRodux.StoreProvider, {
store = Rodux.Store.new(Reducer, nil, {
Rodux.thunkMiddleware,
})
}, {
LayoutValuesProvider = Roact.createElement(LayoutValuesProvider, {
layoutValues = CreateLayoutValues(false)
}, {
ThemeProvider = Roact.createElement(UIBlox.Style.Provider, {
style = appStyle,
}, {
PlayerListApp = Roact.createElement(PlayerListApp),
})
})
})
local instance = Roact.mount(element)
Roact.unmount(instance)
end)
it("should create and destroy without errors tenfoot", function()
local store = Rodux.Store.new(Reducer, nil, {
Rodux.thunkMiddleware,
})
store:dispatch(SetTenFootInterface(true))
local element = Roact.createElement(RoactRodux.StoreProvider, {
store = store
}, {
LayoutValuesProvider = Roact.createElement(LayoutValuesProvider, {
layoutValues = CreateLayoutValues(true)
}, {
ThemeProvider = Roact.createElement(UIBlox.Style.Provider, {
style = appStyle,
}, {
PlayerListApp = Roact.createElement(PlayerListApp),
})
})
})
local instance = Roact.mount(element)
Roact.unmount(instance)
end)
end
| 32.59633 | 95 | 0.780467 |
d03385faa87ef6c1f7ee3dd2530fd7dd722e5ca5 | 487 | sh | Shell | scripts/cleanup-tophat.sh | SilicoSciences/bi-seminar | fc14e10662cc1022d756dd3fa2136e9a7df4e432 | [
"CC0-1.0"
] | null | null | null | scripts/cleanup-tophat.sh | SilicoSciences/bi-seminar | fc14e10662cc1022d756dd3fa2136e9a7df4e432 | [
"CC0-1.0"
] | null | null | null | scripts/cleanup-tophat.sh | SilicoSciences/bi-seminar | fc14e10662cc1022d756dd3fa2136e9a7df4e432 | [
"CC0-1.0"
] | null | null | null | #!/bin/bash
set -e
tophatOut=tophat_out*
function f_doIt {
echo "dollarOne: ${1}"
for f in ${1}/$tophatOut/accepted_hits.bam
do
echo "f: ${f}"
echo "+++++++++++++++++++++++++++++++++++++++++++"
echo "Cleaning $1/$tophatOut/"
echo "-------------------------------------------"
if [[ -f $f ]]; then
echo "hieroglyph: ${f%/*}"
rm -rf "${f%/*}"/tmp
rm -rf "${f%/*}"/logs
rm -f "${f%/*}"/*_sorted.bam
rm -f "${f%/*}"/*.bai
fi
done
}
while [ "$1" != "" ]; do
f_doIt $1 && shift;
done;
| 16.233333 | 50 | 0.449692 |
4b8ea7f2aed29595e152b05388b165c9910b2f5b | 9,121 | cc | C++ | base/linux_input.cc | rkb-1/quad | 66ae3bc5ccb6db070bc1e32a3b9386f6d01a049e | [
"Apache-2.0"
] | 64 | 2017-01-18T15:12:05.000Z | 2022-02-16T08:28:11.000Z | base/linux_input.cc | rkb-1/quad | 66ae3bc5ccb6db070bc1e32a3b9386f6d01a049e | [
"Apache-2.0"
] | 2 | 2021-02-11T14:39:38.000Z | 2021-10-03T16:49:57.000Z | base/linux_input.cc | rkb-1/quad | 66ae3bc5ccb6db070bc1e32a3b9386f6d01a049e | [
"Apache-2.0"
] | 14 | 2021-01-11T09:48:34.000Z | 2021-12-16T16:20:35.000Z | // Copyright 2014-2020 Josh Pieper, jjp@pobox.com.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "linux_input.h"
#include <linux/input.h>
#include <boost/asio/posix/stream_descriptor.hpp>
#include <boost/asio/post.hpp>
#include <fmt/format.h>
#include "mjlib/base/fail.h"
#include "mjlib/base/system_error.h"
namespace mjmech {
namespace base {
class LinuxInput::Impl : boost::noncopyable {
public:
Impl(const boost::asio::any_io_executor& executor) : executor_(executor) {}
boost::asio::any_io_executor executor_;
boost::asio::posix::stream_descriptor stream_{executor_};
struct input_event input_event_;
std::map<int, AbsInfo> abs_info_;
};
LinuxInput::LinuxInput(const boost::asio::any_io_executor& executor)
: impl_(new Impl(executor)) {}
LinuxInput::LinuxInput(const boost::asio::any_io_executor& executor,
const std::string& device)
: LinuxInput(executor) {
Open(device);
}
LinuxInput::~LinuxInput() {}
boost::asio::any_io_executor LinuxInput::get_executor() {
return impl_->executor_;
}
void LinuxInput::Open(const std::string& device) {
int fd = ::open(device.c_str(), O_RDONLY | O_NONBLOCK);
if (fd < 0) {
throw mjlib::base::system_error::syserrno("opening device: " + device);
}
impl_->stream_.assign(fd);
auto f = features(EV_ABS);
for (size_t i = 0; i < f.capabilities.size(); ++i) {
if (!f.capabilities.test(i)) { continue; }
struct cgabs {
cgabs(int axis) : axis(axis) {}
int name() const { return EVIOCGABS(axis); }
void* data() { return &abs_info; }
const int axis;
struct input_absinfo abs_info;
};
cgabs op(i);
impl_->stream_.io_control(op);
AbsInfo info;
info.axis = i;
info.minimum = op.abs_info.minimum;
info.maximum = op.abs_info.maximum;
info.fuzz = op.abs_info.fuzz;
info.flat = op.abs_info.flat;
info.resolution = op.abs_info.resolution;
info.value = op.abs_info.value;
impl_->abs_info_[i] = info;
}
}
int LinuxInput::fileno() const {
return impl_->stream_.native_handle();
}
std::string LinuxInput::name() const {
struct cgname {
int name() const { return EVIOCGNAME(256); }
void* data() { return buffer; }
char buffer[257] = {};
};
cgname op;
impl_->stream_.io_control(op);
return std::string(op.buffer);
}
LinuxInput::AbsInfo LinuxInput::abs_info(int axis) const {
auto it = impl_->abs_info_.find(axis);
if (it != impl_->abs_info_.end()) { return it->second; }
AbsInfo result;
result.axis = axis;
result.minimum = -100;
result.maximum = 100;
result.resolution = 100;
return result;
}
LinuxInput::Features LinuxInput::features(int ev_type) const {
struct cgbit {
cgbit(int name) : name_(name) {}
int name() const { return EVIOCGBIT(name_, sizeof(buffer)); }
void* data() { return buffer; }
const int name_;
char buffer[256] = {};
};
cgbit op(ev_type);
impl_->stream_.io_control(op);
size_t end = [ev_type]() {
switch (ev_type) {
case EV_SYN: { return SYN_MAX; }
case EV_KEY: { return KEY_MAX; }
case EV_REL: { return REL_MAX; }
case EV_ABS: { return ABS_MAX; }
}
mjlib::base::AssertNotReached();
}() / 8 + 1;
Features result;
result.ev_type = ev_type;
for (size_t i = 0; i < end; i++) {
for (int bit = 0; bit < 8; bit++) {
result.capabilities.push_back((op.buffer[i] >> bit) & 0x1 ? true : false);
}
}
return result;
}
void LinuxInput::AsyncRead(Event* event, mjlib::io::ErrorCallback handler) {
impl_->stream_.async_read_some(
boost::asio::buffer(&impl_->input_event_, sizeof(impl_->input_event_)),
[event, handler=std::move(handler), this] (
mjlib::base::error_code ec, std::size_t size) mutable {
if (ec) {
ec.Append("reading input event");
boost::asio::post(
impl_->executor_,
std::bind(std::move(handler), ec));
return;
}
if (size != sizeof(impl_->input_event_)) {
boost::asio::post(
impl_->executor_,
std::bind(
std::move(handler),
mjlib::base::error_code::einval("short read for input event")));
return;
}
event->ev_type = impl_->input_event_.type;
event->code = impl_->input_event_.code;
event->value = impl_->input_event_.value;
/// Update our internal absolute structure if necessary.
if (event->ev_type == EV_ABS) {
auto it = impl_->abs_info_.find(event->code);
if (it != impl_->abs_info_.end()) {
it->second.value = event->value;
}
}
boost::asio::post(
impl_->executor_,
std::bind(std::move(handler), mjlib::base::error_code()));
});
}
void LinuxInput::cancel() {
impl_->stream_.cancel();
}
std::ostream& operator<<(std::ostream& ostr, const LinuxInput& rhs) {
ostr << fmt::format("<LinuxInput '{}'>", rhs.name());
return ostr;
}
namespace {
std::string MapBitmask(boost::dynamic_bitset<> bitset,
std::function<std::string (int)> mapper) {
std::vector<std::string> elements;
for (size_t i = 0; i < bitset.size(); i++) {
if (bitset.test(i)) { elements.push_back(mapper(i)); }
}
std::ostringstream result;
for (size_t i = 0; i < elements.size(); i++) {
if (i != 0) { result << "|"; }
result << elements[i];
}
return result.str();
}
std::string MapEvType(int ev_type) {
switch (ev_type) {
case EV_SYN: { return "EV_SYN"; }
case EV_KEY: { return "EV_KEY"; }
case EV_REL: { return "EV_REL"; }
case EV_ABS: { return "EV_ABS"; }
default: { return fmt::format("EV_{:02X}", ev_type); }
}
}
std::string MapSyn(int code) {
switch (code) {
case SYN_REPORT: { return "SYN_REPORT"; }
case SYN_CONFIG: { return "SYN_CONFIG"; }
case SYN_MT_REPORT: { return "SYN_MT_REPORT"; }
case SYN_DROPPED: { return "SYN_DROPPED"; }
default: { return fmt::format("SYN_{:02X}", code); }
}
}
std::string MapKey(int code) {
return fmt::format("KEY_{:03X}", code);
}
std::string MapRel(int code) {
switch (code) {
case REL_X: { return "REL_X"; }
case REL_Y: { return "REL_Y"; }
case REL_Z: { return "REL_Z"; }
case REL_RX: { return "REL_RX"; }
case REL_RY: { return "REL_RY"; }
case REL_RZ: { return "REL_RZ"; }
case REL_HWHEEL: { return "REL_HWHEEL"; }
case REL_DIAL: { return "REL_DIAL"; }
case REL_WHEEL: { return "REL_WHEEL"; }
case REL_MISC: { return "REL_MISC"; }
default: { return fmt::format("REL_{:02X}", code); }
}
}
std::string MapAbs(int code) {
switch (code) {
case ABS_X: { return "ABS_X"; }
case ABS_Y: { return "ABS_Y"; }
case ABS_Z: { return "ABS_Z"; }
case ABS_RX: { return "ABS_RX"; }
case ABS_RY: { return "ABS_RY"; }
case ABS_RZ: { return "ABS_RZ"; }
case ABS_HAT0X: { return "ABS_HAT0X"; }
case ABS_HAT0Y: { return "ABS_HAT0Y"; }
case ABS_HAT1X: { return "ABS_HAT1X"; }
case ABS_HAT1Y: { return "ABS_HAT1Y"; }
case ABS_HAT2X: { return "ABS_HAT2X"; }
case ABS_HAT2Y: { return "ABS_HAT2Y"; }
case ABS_HAT3X: { return "ABS_HAT3X"; }
case ABS_HAT3Y: { return "ABS_HAT3Y"; }
default: { return fmt::format("ABS_{:02X}", code); }
}
}
std::string MapUnknown(int code) {
return fmt::format("{:03X}", code);
}
std::function<std::string (int)> MakeCodeMapper(int ev_type) {
switch (ev_type) {
case EV_SYN: { return MapSyn; }
case EV_KEY: { return MapKey; }
case EV_REL: { return MapRel; }
case EV_ABS: { return MapAbs; }
default: { return MapUnknown; }
}
}
}
std::ostream& operator<<(std::ostream& ostr, const LinuxInput::AbsInfo& rhs) {
ostr << fmt::format("<AbsInfo {} val={} min={} max={} scaled={}>",
MapAbs(rhs.axis),
rhs.value,
rhs.minimum,
rhs.maximum,
rhs.scaled());
return ostr;
}
std::ostream& operator<<(std::ostream& ostr, const LinuxInput::Features& rhs) {
ostr << fmt::format("<Features type={} {}>",
MapEvType(rhs.ev_type),
MapBitmask(rhs.capabilities, MakeCodeMapper(rhs.ev_type)));
return ostr;
}
std::ostream& operator<<(std::ostream& ostr, const LinuxInput::Event& rhs) {
ostr << fmt::format("<Event ev_type={} code={} value={}>",
MapEvType(rhs.ev_type),
MakeCodeMapper(rhs.ev_type)(rhs.code),
rhs.value);
return ostr;
}
}
}
| 27.978528 | 82 | 0.609802 |
fae903e9e771cb995eef1156eea6b319c9d36de1 | 8,009 | sql | SQL | src/test/regress/sql/float8.sql | deart2k/gpdb | df0144f8536c34a19e1c0158580e79a3906ace2e | [
"PostgreSQL",
"Apache-2.0"
] | 34 | 2021-01-18T14:25:24.000Z | 2021-06-05T03:21:10.000Z | src/test/regress/sql/float8.sql | deart2k/gpdb | df0144f8536c34a19e1c0158580e79a3906ace2e | [
"PostgreSQL",
"Apache-2.0"
] | 134 | 2018-08-09T09:51:53.000Z | 2022-03-29T03:17:27.000Z | src/test/regress/sql/float8.sql | deart2k/gpdb | df0144f8536c34a19e1c0158580e79a3906ace2e | [
"PostgreSQL",
"Apache-2.0"
] | 8 | 2018-05-21T16:20:39.000Z | 2021-11-01T07:05:39.000Z | --
-- FLOAT8
--
CREATE TABLE FLOAT8_TBL(i INT DEFAULT 1, f1 float8);
INSERT INTO FLOAT8_TBL(f1) VALUES (' 0.0 ');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1004.30 ');
INSERT INTO FLOAT8_TBL(f1) VALUES (' -34.84');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1.2345678901234e+200');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1.2345678901234e-200');
-- test for underflow and overflow handling
SELECT '10e400'::float8;
SELECT '-10e400'::float8;
SELECT '1e309'::float8;
SELECT '10e-400'::float8;
SELECT '-10e-400'::float8;
SELECT '1e-324'::float8;
SELECT '1e308'::float8;
SELECT '1e-323'::float8;
SELECT '0.0'::float8;
-- bad input
INSERT INTO FLOAT8_TBL(f1) VALUES ('');
INSERT INTO FLOAT8_TBL(f1) VALUES (' ');
INSERT INTO FLOAT8_TBL(f1) VALUES ('xyz');
INSERT INTO FLOAT8_TBL(f1) VALUES ('5.0.0');
INSERT INTO FLOAT8_TBL(f1) VALUES ('5 . 0');
INSERT INTO FLOAT8_TBL(f1) VALUES ('5. 0');
INSERT INTO FLOAT8_TBL(f1) VALUES (' - 3');
INSERT INTO FLOAT8_TBL(f1) VALUES ('123 5');
-- special inputs
SELECT 'NaN'::float8;
SELECT 'nan'::float8;
SELECT ' NAN '::float8;
SELECT 'infinity'::float8;
SELECT 'inf'::float8;
SELECT ' -INFINiTY '::float8;
SELECT '+Infinity'::float8;
SELECT '+INF'::float8;
SELECT '+inf'::float8;
SELECT '+INFINITY'::float8;
-- bad special inputs
SELECT 'N A N'::float8;
SELECT 'NaN x'::float8;
SELECT ' INFINITY x'::float8;
SELECT 'Infinity'::float8 + 100.0;
SELECT 'Infinity'::float8 / 'Infinity'::float8;
SELECT 'nan'::float8 / 'nan'::float8;
SELECT 'nan'::numeric::float8;
SELECT '' AS five, f1 FROM FLOAT8_TBL ORDER BY 2;
SELECT '' AS four, f.f1 FROM FLOAT8_TBL f WHERE f.f1 <> '1004.3' ORDER BY 2;
SELECT '' AS one, f.f1 FROM FLOAT8_TBL f WHERE f.f1 = '1004.3' ORDER BY 2;
SELECT '' AS three, f.f1 FROM FLOAT8_TBL f WHERE '1004.3' > f.f1 ORDER BY 2;
SELECT '' AS three, f.f1 FROM FLOAT8_TBL f WHERE f.f1 < '1004.3' ORDER BY 2;
SELECT '' AS four, f.f1 FROM FLOAT8_TBL f WHERE '1004.3' >= f.f1 ORDER BY 2;
SELECT '' AS four, f.f1 FROM FLOAT8_TBL f WHERE f.f1 <= '1004.3' ORDER BY 2;
SELECT '' AS three, f.f1, f.f1 * '-10' AS x
FROM FLOAT8_TBL f
WHERE f.f1 > '0.0' ORDER BY 2;
SELECT '' AS three, f.f1, f.f1 + '-10' AS x
FROM FLOAT8_TBL f
WHERE f.f1 > '0.0' ORDER BY 2;
SELECT '' AS three, f.f1, f.f1 / '-10' AS x
FROM FLOAT8_TBL f
WHERE f.f1 > '0.0' ORDER BY 2;
SELECT '' AS three, f.f1, f.f1 - '-10' AS x
FROM FLOAT8_TBL f
WHERE f.f1 > '0.0' ORDER BY 2;
SELECT '' AS one, f.f1 ^ '2.0' AS square_f1
FROM FLOAT8_TBL f where f.f1 = '1004.3';
-- absolute value
SELECT '' AS five, f.f1, @f.f1 AS abs_f1
FROM FLOAT8_TBL f;
-- truncate
SELECT '' AS five, f.f1, trunc(f.f1) AS trunc_f1
FROM FLOAT8_TBL f ORDER BY 2;
-- round
SELECT '' AS five, f.f1, round(f.f1) AS round_f1
FROM FLOAT8_TBL f ORDER BY 2;
-- ceil / ceiling
select ceil(f1) as ceil_f1 from float8_tbl f ORDER BY 1;
select ceiling(f1) as ceiling_f1 from float8_tbl f ORDER BY 1;
-- floor
select floor(f1) as floor_f1 from float8_tbl f ORDER BY 1;
-- sign
select sign(f1) as sign_f1 from float8_tbl f ORDER BY 1;
-- square root
SELECT sqrt(float8 '64') AS eight;
SELECT |/ float8 '64' AS eight;
SELECT '' AS three, f.f1, |/f.f1 AS sqrt_f1
FROM FLOAT8_TBL f
WHERE f.f1 > '0.0' ORDER BY 2;
-- power
SELECT power(float8 '144', float8 '0.5');
-- take exp of ln(f.f1)
SELECT '' AS three, f.f1, exp(ln(f.f1)) AS exp_ln_f1
FROM FLOAT8_TBL f
WHERE f.f1 > '0.0' ORDER BY 2;
-- cube root
SELECT ||/ float8 '27' AS three;
SELECT '' AS five, f.f1, ||/f.f1 AS cbrt_f1 FROM FLOAT8_TBL f ORDER BY 2;
SELECT '' AS five, f1 FROM FLOAT8_TBL ORDER BY 2;
UPDATE FLOAT8_TBL
SET f1 = FLOAT8_TBL.f1 * '-1'
WHERE FLOAT8_TBL.f1 > '0.0';
SELECT '' AS bad, f.f1 * '1e200' from FLOAT8_TBL f;
SELECT '' AS bad, f.f1 ^ '1e200' from FLOAT8_TBL f;
SELECT 0 ^ 0 + 0 ^ 1 + 0 ^ 0.0 + 0 ^ 0.5;
SELECT '' AS bad, ln(f.f1) from FLOAT8_TBL f where f.f1 = '0.0' ;
SELECT '' AS bad, ln(f.f1) from FLOAT8_TBL f where f.f1 < '0.0' ;
SELECT '' AS bad, exp(f.f1) from FLOAT8_TBL f;
SELECT '' AS bad, f.f1 / '0.0' from FLOAT8_TBL f;
SELECT '' AS five, f1 FROM FLOAT8_TBL ORDER BY 2;
-- test for over- and underflow
INSERT INTO FLOAT8_TBL(f1) VALUES ('10e400');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-10e400');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1e309');
INSERT INTO FLOAT8_TBL(f1) VALUES ('10e-400');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-10e-400');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1e-324');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1e308');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1e-323');
INSERT INTO FLOAT8_TBL(f1) VALUES ('+INFINITY'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('+InFiNiTY'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('+Inf'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('-INFINITY'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('-InFiNiTY'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('-Inf'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('NaN'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('+naN'::float8);
INSERT INTO FLOAT8_TBL(f1) VALUES ('-naN'::float8);
-- test for over- and underflow with update statement
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='1e-324'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='1e309'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='1e-400'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='1e400'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='0.0'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='+INFINITY'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='+InFiNiTY'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='+Inf'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='-INFINITY'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='-Inf'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='NaN'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='+naN'::float8;
UPDATE FLOAT8_TBL SET f1='0.0'::float8 WHERE f1='-naN'::float8;
-- test for over- and underflow with delete statement
DELETE FROM FLOAT8_TBL WHERE f1='1e-324'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='1e309'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='1e400'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='1e-400'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='0.0'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='+INFINITY'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='+InFiNiTY'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='+Inf'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='-INFINITY'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='-Inf'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='-naN'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='+naN'::float8;
DELETE FROM FLOAT8_TBL WHERE f1='NaN'::float8;
-- maintain external table consistency across platforms
-- delete all values and reinsert well-behaved ones
DELETE FROM FLOAT8_TBL;
INSERT INTO FLOAT8_TBL(f1) VALUES ('0.0');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-34.84');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1004.30');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e+200');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e-200');
SELECT '' AS five, f1 FROM FLOAT8_TBL ORDER BY 2;
-- test if you can dump/restore subnormal (1e-323) values
-- using COPY
CREATE TABLE FLOATS(a float8);
INSERT INTO FLOATS select 1e-307::float8 / 10^i FROM generate_series(1,16) i;
SELECT * FROM FLOATS ORDER BY a;
SELECT float8in(float8out(a)) FROM FLOATS ORDER BY a;
COPY FLOATS TO '/tmp/floats';
TRUNCATE FLOATS;
COPY FLOATS FROM '/tmp/floats';
SELECT * FROM FLOATS ORDER BY a;
-- test edge-case coercions to integer
SELECT '32767.4'::float8::int2;
SELECT '32767.6'::float8::int2;
SELECT '-32768.4'::float8::int2;
SELECT '-32768.6'::float8::int2;
SELECT '2147483647.4'::float8::int4;
SELECT '2147483647.6'::float8::int4;
SELECT '-2147483648.4'::float8::int4;
SELECT '-2147483648.6'::float8::int4;
SELECT '9223372036854773760'::float8::int8;
SELECT '9223372036854775807'::float8::int8;
SELECT '-9223372036854775808.5'::float8::int8;
SELECT '-9223372036854780000'::float8::int8; | 27.334471 | 77 | 0.687601 |
791786d814e55ce29c9d0912752cbe79bd34cd36 | 525 | dart | Dart | app_dart/lib/src/model/appengine/commit.g.dart | xster/cocoon | c9ef7486a6f820d538c59f991f4d8f6811fe4164 | [
"BSD-3-Clause"
] | null | null | null | app_dart/lib/src/model/appengine/commit.g.dart | xster/cocoon | c9ef7486a6f820d538c59f991f4d8f6811fe4164 | [
"BSD-3-Clause"
] | null | null | null | app_dart/lib/src/model/appengine/commit.g.dart | xster/cocoon | c9ef7486a6f820d538c59f991f4d8f6811fe4164 | [
"BSD-3-Clause"
] | null | null | null | // GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: always_specify_types, implicit_dynamic_parameter
part of 'commit.dart';
// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************
Map<String, dynamic> _$SerializableCommitToJson(SerializableCommit instance) =>
<String, dynamic>{
'Key': const KeyConverter().toJson(instance.key),
'Checklist': instance.facade,
};
| 32.8125 | 79 | 0.518095 |
a42dab7c8e42f61e894f0551ae2f39fdc0271df7 | 3,136 | php | PHP | resources/views/map/edat.blade.php | muhamadanjar/rth | 435e34222f46ce9ae44e41d9ee4f381339130e59 | [
"MIT"
] | null | null | null | resources/views/map/edat.blade.php | muhamadanjar/rth | 435e34222f46ce9ae44e41d9ee4f381339130e59 | [
"MIT"
] | null | null | null | resources/views/map/edat.blade.php | muhamadanjar/rth | 435e34222f46ce9ae44e41d9ee4f381339130e59 | [
"MIT"
] | null | null | null | <!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<!--The viewport meta tag is used to improve the presentation and behavior of the samples
on iOS devices-->
<meta name="viewport" content="initial-scale=1, maximum-scale=1,user-scalable=no">
<title>SanFrancisco311 - Incidents</title>
<link rel="stylesheet" href="http://js.arcgis.com/3.13/esri/css/esri.css">
<style>
html, body { height: 100%; width: 100%; margin: 0; overflow: hidden; }
#map { height: 100%; padding: 0;}
#footer { height: 2em; text-align: center; font-size: 1.1em; padding: 0.5em; }
.dj_ie .infowindow .window .top .right .user .content { position: relative; }
.dj_ie .simpleInfoWindow .content {position: relative;}
</style>
<script src="http://js.arcgis.com/3.13/"></script>
<script>
var map;
require([
"esri/map",
"esri/layers/FeatureLayer",
"esri/dijit/editing/AttachmentEditor",
"esri/config",
"dojo/parser", "dojo/dom",
"dijit/layout/BorderContainer", "dijit/layout/ContentPane", "dojo/domReady!"
], function(
Map, FeatureLayer, AttachmentEditor, esriConfig,
parser, dom
) {
parser.parse();
// a proxy page is required to upload attachments
// refer to "Using the Proxy Page" for more information: https://developers.arcgis.com/javascript/jshelp/ags_proxy.html
esriConfig.defaults.io.proxyUrl = "/proxy/";
map = new Map("map", {
basemap: "streets",
center: [-122.427, 37.769],
zoom: 17
});
map.on("load", mapLoaded);
function mapLoaded() {
var featureLayer = new FeatureLayer("http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/SanFrancisco/311Incidents/FeatureServer/0",{
mode: FeatureLayer.MODE_ONDEMAND
});
map.infoWindow.setContent("<div id='content' style='width:100%'></div>");
map.infoWindow.resize(350,200);
var attachmentEditor = new AttachmentEditor({}, dom.byId("content"));
attachmentEditor.startup();
featureLayer.on("click", function(evt) {
var objectId = evt.graphic.attributes[featureLayer.objectIdField];
map.infoWindow.setTitle(objectId);
attachmentEditor.showAttachments(evt.graphic,featureLayer);
map.infoWindow.show(evt.screenPoint, map.getInfoWindowAnchor(evt.screenPoint));
});
map.addLayer(featureLayer);
}
});
</script>
</head>
<body>
<div data-dojo-type="dijit/layout/BorderContainer"
data-dojo-props="design:'headline'"
style="width:100%;height:100%;">
<div id="map"
data-dojo-type="dijit/layout/ContentPane"
data-dojo-props="region:'center'"></div>
<div id="footer"
data-dojo-type="dijit/layout/ContentPane"
data-dojo-props="region:'bottom'">
Click point to view/create/delete attachments.
</div>
</div>
</body>
</html> | 36.465116 | 150 | 0.607143 |
dd80022805d52f13378334935db0963f802b7513 | 4,671 | java | Java | src/main/java/org/fmgroup/mediator/common/UtilClass.java | xue-xy/Mediator_origin | 431c04e07266a7976ccac31d542fd594b2b27371 | [
"MIT"
] | 1 | 2021-05-21T14:03:03.000Z | 2021-05-21T14:03:03.000Z | src/main/java/org/fmgroup/mediator/common/UtilClass.java | xue-xy/Mediator_origin | 431c04e07266a7976ccac31d542fd594b2b27371 | [
"MIT"
] | null | null | null | src/main/java/org/fmgroup/mediator/common/UtilClass.java | xue-xy/Mediator_origin | 431c04e07266a7976ccac31d542fd594b2b27371 | [
"MIT"
] | 2 | 2018-03-09T07:01:08.000Z | 2018-03-09T09:23:48.000Z | package org.fmgroup.mediator.common;
import org.fmgroup.mediator.plugin.generator.Generator;
import org.fmgroup.mediator.plugin.command.Command;
import org.fmgroup.mediator.plugin.Plugin;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
// todo consider using https://github.com/ronmamo/reflections instead
// prevent doing redundant work!
public class UtilClass {
/**
* use cache to improve performance, but only when the classes do not change
* dynamically
*/
private static Map<Class, Map<String, List<Class>>> cache = new HashMap<>();
private static String getClassRoot() {
try {
return URLDecoder.decode(
ToolInfo.getSystemRootPath(),
"utf-8"
);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
return null;
}
/**
     * get all implementation classes of <b>_interface</b> in the root package, i.e. the whole
* directory in classloader
* @param _interface
* @return
*/
public static List<Class> getImplementation(Class _interface) {
return getImplementation(_interface, "");
}
/**
     * get all implementation classes of <b>_interface</b> in package <b>pkgname</b>
* <b>NOTE</b> only this getImplementation function is utilized with cache
* @param _interface
* @param pkgname
* @return
*/
public static List<Class> getImplementation(Class _interface, String pkgname) {
// use cache to enhance performance
if (cache.containsKey(_interface) && cache.get(_interface).containsKey(pkgname))
return cache.get(_interface).get(pkgname);
Path searchPath = Paths.get(
getClassRoot(),
pkgname.replace(".", "/")
);
File pkg = searchPath.toFile();
assert pkg.isDirectory();
List<Class> classes = getImplementation(_interface, pkg);
if (!cache.containsKey(_interface)) cache.put(_interface, new HashMap<>());
cache.get(_interface).put(pkgname, classes);
return classes;
}
public static List<Class> getImplementation(Class _interface, File root) {
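        // Recursively walk the directory tree, convert each .class file path into a
        // fully qualified class name, and keep the classes assignable to _interface.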
List<Class> classes = new ArrayList<>();
for (File f : root.listFiles()) {
if (f.isFile() && f.getName().endsWith(".class")) {
String clsname = f.getPath().substring(0, f.getPath().length() - 6);
if (clsname.startsWith(getClassRoot())) {
clsname = clsname.substring(getClassRoot().length());
}
if (ToolInfo.isWindows()) {
clsname = clsname.replace("\\", ".");
} else {
clsname = clsname.replace("/", ".");
}
try {
if (isExtendedFrom(Class.forName(clsname), _interface)) {
classes.add(Class.forName(clsname));
}
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (NoClassDefFoundError e) {
// just ignore them
}
} else if (f.isDirectory()) {
classes.addAll(getImplementation(_interface, f));
}
}
return classes;
}
public static List<Class<Plugin>> getPlugins() {
List<Class<Plugin>> plugins = new ArrayList<>();
for (Class c: getImplementation(Plugin.class, "org.fmgroup.mediator.plugins")) {
plugins.add(c);
}
return plugins;
}
public static List<Class<Command>> getCommands() {
List<Class<Command>> commands = new ArrayList<>();
for (Class c: getImplementation(Command.class, "org.fmgroup.mediator.plugins")) {
commands.add(c);
}
return commands;
}
public static List<Class<Generator>> getGenerators() {
List<Class<Generator>> generators = new ArrayList<>();
for (Class c : getImplementation(Generator.class, "org.fmgroup.mediator.plugins")) {
generators.add(c);
}
return generators;
}
public static boolean isExtendedFrom(Class sub, Class parent) {
try {
sub.asSubclass(parent);
return true;
} catch (ClassCastException e) {
// this class does not implement the interface
}
return false;
}
}
| 31.993151 | 92 | 0.581674 |
403a9e6c2bffb56e89d4bdb675d9f3d4c1fe8be1 | 2,151 | rb | Ruby | lib/generators/active_record/check_permission_generator.rb | snlkumar/permission | 77a5873c88800f07f8de622a30fcf1dbf58fd7b5 | [
"MIT"
] | null | null | null | lib/generators/active_record/check_permission_generator.rb | snlkumar/permission | 77a5873c88800f07f8de622a30fcf1dbf58fd7b5 | [
"MIT"
] | null | null | null | lib/generators/active_record/check_permission_generator.rb | snlkumar/permission | 77a5873c88800f07f8de622a30fcf1dbf58fd7b5 | [
"MIT"
] | null | null | null | require 'rails/generators/active_record'
require 'generators/check_permission/orm_helpers'
module ActiveRecord
module Generators
class CheckPermissionGenerator < ActiveRecord::Generators::Base
argument :attributes, type: :array, default: [], banner: "field:type field:type"
include CheckPermission::Generators::OrmHelpers
source_root File.expand_path("../templates", __FILE__)
def copy_permission_migration
if (behavior == :invoke && model_exists?) || (behavior == :revoke && migration_exists?(table_name))
# migration_template "migration_existing.rb", "db/migrate/add_permission_to_#{table_name}.rb"
else
migration_template "migration.rb", "db/migrate/permission_create_#{table_name}.rb"
end
end
def generate_model
invoke "active_record:model", [name], migration: false unless model_exists? && behavior == :invoke
end
def generate_model_permission
permission = "Permission"
Rails::Generators.invoke("active_record:model", [permission,"is_read:boolean", "is_update:boolean",
"is_create:boolean", "is_destroy:boolean", "resource_name:string", "#{table_name.singularize}:references"], {migration: true, timestamps: true})
end
def generate_view
create_file Rails.root.join("app", "views", "permissions", "_permissions.html.rb"), "#{partial_content}" unless permission_view_exists?
end
def generate_helper
create_file Rails.root.join("app", "helpers", "permissions_helper.rb"), "
module PermissionsHelper
include CheckPermission
def has_permission
super(params)
end
end
" unless permissions_helper_exists?
end
def inject_permission_content
content = model_contents
class_path = if namespaced?
class_name.to_s.split("::")
else
[class_name]
end
indent_depth = class_path.size - 1
content = content.split("\n").map { |line| " " * indent_depth + line } .join("\n") << "\n"
inject_into_class(model_path, class_path.last, content) if model_exists?
end
def migration_data
<<RUBY
t.string :email
t.string :name
RUBY
end
end
end
end | 35.262295 | 153 | 0.702929 |
b94b2fc8c9c6020042fe1e1788c104320995b039 | 1,633 | php | PHP | Workflow/src/nodes/variables/increment.php | real-chocopanda/ezComponents-lite | 8d99c82699994d2532fa2b43f3c56e1d507d8a87 | [
"BSD-3-Clause"
] | null | null | null | Workflow/src/nodes/variables/increment.php | real-chocopanda/ezComponents-lite | 8d99c82699994d2532fa2b43f3c56e1d507d8a87 | [
"BSD-3-Clause"
] | null | null | null | Workflow/src/nodes/variables/increment.php | real-chocopanda/ezComponents-lite | 8d99c82699994d2532fa2b43f3c56e1d507d8a87 | [
"BSD-3-Clause"
] | null | null | null | <?php
/**
* File containing the ezcWorkflowNodeVariableIncrement class.
*
* @package Workflow
* @version 1.4rc1
* @copyright Copyright (C) 2005-2009 eZ Systems AS. All rights reserved.
* @license http://ez.no/licenses/new_bsd New BSD License
*/
/**
* This node increments a workflow variable by one when executed..
*
* <code>
* <?php
* $inc = new ezcWorkflowNodeVariableIncrement( 'variable name' );
* ?>
* </code>
*
* Incoming nodes: 1
* Outgoing nodes: 1
*
* @package Workflow
* @version 1.4rc1
*/
class ezcWorkflowNodeVariableIncrement extends ezcWorkflowNodeArithmeticBase
{
/**
* The name of the variable to be incremented.
*
* @var string
*/
protected $configuration;
/**
* Perform variable modification.
*/
protected function doExecute()
{
$this->variable++;
}
/**
* Generate node configuration from XML representation.
*
* @param DOMElement $element
* @return string
* @ignore
*/
public static function configurationFromXML( DOMElement $element )
{
return $element->getAttribute( 'variable' );
}
/**
* Generate XML representation of this node's configuration.
*
* @param DOMElement $element
* @ignore
*/
public function configurationToXML( DOMElement $element )
{
$element->setAttribute( 'variable', $this->configuration );
}
/**
* Returns a textual representation of this node.
*
* @return string
* @ignore
*/
public function __toString()
{
return $this->configuration . '++';
}
}
?>
| 20.935897 | 76 | 0.614819 |
0afba7cc4770e6ed7af8f046b96df8c8ee333ece | 4,371 | cpp | C++ | code/common/maibo/Resources/ResourceManager.cpp | iboB/maibo | df43ddf82b3c79e00f3d2c8b38db181e5edae264 | [
"MIT"
] | 4 | 2015-08-07T09:11:15.000Z | 2018-01-03T15:47:04.000Z | code/common/maibo/Resources/ResourceManager.cpp | iboB/maibo | df43ddf82b3c79e00f3d2c8b38db181e5edae264 | [
"MIT"
] | null | null | null | code/common/maibo/Resources/ResourceManager.cpp | iboB/maibo | df43ddf82b3c79e00f3d2c8b38db181e5edae264 | [
"MIT"
] | null | null | null | // MaiBo
// Copyright(c) 2015 Borislav Stanimirov
//
// Distributed under the MIT Software License
// See accompanying file LICENSE.txt or copy at
// http://opensource.org/licenses/MIT
//
#include "ResourceManager.h"
#include "maibo/Assets/FileManager.h"
#include "maibo/Common/ResourceTask.h"
#include "maibo/Common/ResourceFuture.h"
#include "maibo/Task.h"
#include "maibo/TaskManager.h"
#include "maibo/Rendering/Shader.h"
#include "maibo/Rendering/GPUProgram.h"
#include "maibo/Rendering/Texture.h"
#include <fstream>
using namespace maibo;
using namespace std;
namespace
{
class LoadShaderTask : public ResourceTask<ShaderPtr, vector<char>>
{
public:
LoadShaderTask(const string& shaderName, const ShaderType::Type shaderType, ConstResourceFuturePtr<vector<char>> readFileFuture)
: ResourceTask(readFileFuture)
, m_shaderName(shaderName)
, m_shaderType(shaderType)
{
}
bool safeExecute() override
{
future->resource() = make_shared<Shader>(m_shaderType, m_shaderName);
int error = !future->resource()->load(m_dependentFuture->resource());
future->setErrorCode(error);
future->setProgress(1.f);
future->setDone();
return true;
}
private:
const string m_shaderName;
const ShaderType::Type m_shaderType;
};
}
ResourceFuturePtr<ShaderPtr> ResourceManager::loadShaderAsync(const std::string& path, ShaderType::Type type, bool alsoGetFile)
{
auto future = FileManager::instance().readFileAsync(path, alsoGetFile);
auto task = new LoadShaderTask(path, type, future);
TaskManager::instance().pushTask(task);
return task->future;
}
namespace
{
class LoadGPUProgramTask : public ResourceTask<GPUProgramPtr, ShaderPtr>
{
public:
LoadGPUProgramTask(const string& programName, ConstResourceFuturePtr<ShaderPtr> vsFuture, ConstResourceFuturePtr<ShaderPtr> fsFuture)
: ResourceTask(fsFuture)
, m_programName(programName)
, m_vsFuture(vsFuture)
, m_fsFuture(fsFuture)
{
}
bool safeExecute() override
{
if (!m_vsFuture->isDone())
{
return false;
}
assert(m_fsFuture->isDone());
future->resource() = make_shared<GPUProgram>(m_programName);
future->resource()->attachShader(m_vsFuture->resource());
future->resource()->attachShader(m_fsFuture->resource());
int error = !future->resource()->link();
future->setErrorCode(error);
future->setProgress(1.f);
future->setDone();
return true;
}
private:
const string m_programName;
ConstResourceFuturePtr<ShaderPtr> m_vsFuture;
ConstResourceFuturePtr<ShaderPtr> m_fsFuture;
};
}
ResourceFuturePtr<GPUProgramPtr> ResourceManager::loadGPUProgramAsync(const std::string& vertexShaderPath, const std::string& fragmentShaderPath, bool alsoGetFiles)
{
auto vsFuture = loadShaderAsync(vertexShaderPath, ShaderType::Vertex, alsoGetFiles);
auto fsFuture = loadShaderAsync(fragmentShaderPath, ShaderType::Fragment, alsoGetFiles);
auto task = new LoadGPUProgramTask(vertexShaderPath + fragmentShaderPath, vsFuture, fsFuture);
TaskManager::instance().pushTask(task);
return task->future;
}
namespace
{
class LoadTextureTask : public ResourceTask<TexturePtr, int>
{
public:
LoadTextureTask(const string& textureName, ConstResourceFuturePtr<int> getFileFuture)
: ResourceTask(getFileFuture)
, m_textureName(textureName)
{
}
bool safeExecute() override
{
future->resource() = make_shared<Texture>(m_textureName);
auto success = future->resource()->loadFromFile(m_textureName.c_str());
future->setErrorCode(!success);
future->setProgress(1.f);
future->setDone();
return true;
}
private:
const string m_textureName;
};
}
ResourceFuturePtr<TexturePtr> ResourceManager::loadTexture(const std::string& path, bool alsoGetFile)
{
auto future = FileManager::instance().getFileAsync(path);
auto task = new LoadTextureTask(path, future);
TaskManager::instance().pushTask(task);
return task->future;
} | 28.2 | 164 | 0.668268 |
b567c04613ba8741341589c6a821f01d602f97ab | 107 | jbuilder | Ruby | app/views/tt/api/v1/split_details/show.json.jbuilder | HipsterBrown/test_track_rails_client | 10abd4ae94d635bbe6171a753ca155922c316d9b | [
"MIT"
] | 39 | 2016-10-25T22:05:38.000Z | 2020-10-01T16:41:56.000Z | app/views/tt/api/v1/split_details/show.json.jbuilder | HipsterBrown/test_track_rails_client | 10abd4ae94d635bbe6171a753ca155922c316d9b | [
"MIT"
] | 110 | 2016-10-26T02:20:26.000Z | 2021-05-12T20:04:28.000Z | app/views/tt/api/v1/split_details/show.json.jbuilder | bymealpal/test_track_rails_client | 91d514d22c90064230f76686cab78ec62a453702 | [
"MIT"
] | 22 | 2016-10-27T14:49:15.000Z | 2020-09-03T13:29:02.000Z | json.(@split_detail, :name, :hypothesis, :assignment_criteria, :description, :owner, :location, :platform)
| 53.5 | 106 | 0.757009 |
88bc4650d726d168c5b1014a3df5588f17428693 | 1,794 | rs | Rust | raknet/src/protocol/connection_request_accepted.rs | AnvilMC/anvil_bedrock | b22fdfd7aaed7cd9e11eec0aff3a8fad4315d9d8 | [
"Apache-2.0"
] | 3 | 2021-03-29T00:41:29.000Z | 2021-11-26T11:05:07.000Z | raknet/src/protocol/connection_request_accepted.rs | AnvilMC/anvil_bedrock | b22fdfd7aaed7cd9e11eec0aff3a8fad4315d9d8 | [
"Apache-2.0"
] | 2 | 2021-03-29T00:50:14.000Z | 2021-03-29T00:51:03.000Z | raknet/src/protocol/connection_request_accepted.rs | AnvilMC/anvil_bedrock | b22fdfd7aaed7cd9e11eec0aff3a8fad4315d9d8 | [
"Apache-2.0"
] | null | null | null | use std::{convert::TryInto, net::SocketAddr};
use crate::prelude::{Address, RaknetPacket, RaknetPacketData};
use super::ConnectionRequest;
pub struct ConnectionRequestAccepted {
pub client_adress: Address,
pub system_index: i16,
pub internal_ids: [Address; 10],
pub request_time: i64,
pub time: i64,
}
impl RaknetPacket for ConnectionRequestAccepted {
const RANGE: std::ops::Range<u8> = 0x10..0x11;
fn id(&self) -> u8 {
0x10
}
}
impl RaknetPacketData for ConnectionRequestAccepted {
fn decode(reader: &mut impl crate::prelude::Reader) -> Option<Self> {
Some(Self {
client_adress: Address::decode(reader)?,
system_index: i16::decode(reader)?,
internal_ids: <[Address; 10]>::decode(reader)?,
request_time: i64::decode(reader)?,
time: i64::decode(reader)?,
})
}
fn encode(&self, writer: &mut impl crate::prelude::Writer) -> Option<()> {
self.client_adress.encode(writer)?;
self.system_index.encode(writer)?;
self.internal_ids.encode(writer)?;
self.request_time.encode(writer)?;
self.time.encode(writer)
}
}
impl ConnectionRequestAccepted {
pub fn from(req: ConnectionRequest, peer: &SocketAddr) -> Self {
Self {
client_adress: peer.into(),
system_index: 0,
internal_ids: {
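                // Fill all 10 internal address slots with the same placeholder
                // address, 255.255.255.255:19132.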
let address = Address {
ip: [255; 4],
port: 19132,
};
(0..10)
.map(|_| address.clone())
.collect::<Vec<_>>()
.try_into()
.unwrap()
},
request_time: req.time,
time: req.time,
}
}
}
| 28.03125 | 78 | 0.546823 |
0d432a184be9c8a49eff50e31df672e82eb83a1f | 2,057 | cs | C# | mko.Asp.Mvc.Test/Controllers/MathController.cs | mk-prg-net/mk-prg-net.lib | 1636a12de25ed3dfb1ade4ed6bf2bdb60fc05677 | [
"MIT"
] | null | null | null | mko.Asp.Mvc.Test/Controllers/MathController.cs | mk-prg-net/mk-prg-net.lib | 1636a12de25ed3dfb1ade4ed6bf2bdb60fc05677 | [
"MIT"
] | null | null | null | mko.Asp.Mvc.Test/Controllers/MathController.cs | mk-prg-net/mk-prg-net.lib | 1636a12de25ed3dfb1ade4ed6bf2bdb60fc05677 | [
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using mko.Asp.Mvc.Test.Models;
namespace mko.Asp.Mvc.Test.Controllers
{
public class MathController : Controller
{
protected override void OnException(ExceptionContext filterContext)
{
filterContext.ExceptionHandled = true;
View("Error").ExecuteResult(ControllerContext);
}
//
// GET: /Math/
public ActionResult Index()
{
return View("IndexView");
}
public ActionResult BinOp()
{
return View("BinOpView", new Models.BinOpModel());
}
[HttpPost]
public ActionResult BinOp(Models.BinOpModel binOpModel)
{
if (ModelState.IsValid)
{
switch (binOpModel.Operator)
{
case Models.BinOpModel.Operators.add:
binOpModel.Result = binOpModel.A + binOpModel.B;
break;
case Models.BinOpModel.Operators.sub:
binOpModel.Result = binOpModel.A - binOpModel.B;
break;
case Models.BinOpModel.Operators.mul:
binOpModel.Result = binOpModel.A * binOpModel.B;
break;
//case Models.BinOpModel.Operators.div:
// binOpModel.Result = binOpModel.A / binOpModel.B;
// break;
default:
throw new InvalidOperationException();
}
                //// The compiler automatically rewrites the following call into the form
                //// binOpModel.Result = BinOpModelExtensions.executeOp(binOpModel);
//binOpModel.Result = binOpModel.executeOp();
return View("BinOpView", binOpModel);
            }
            return View("BinOpView", binOpModel);
}
}
}
| 30.25 | 84 | 0.518717 |
b0d35e2080fbbf66b79f264964e717b80be5eb09 | 634 | py | Python | goclean.py | JerrieYuan/DrawItforSublimeText | 0ba8acd030a60bacd4a21227673b9af1005cad50 | [
"MIT"
] | 2 | 2016-02-06T02:15:35.000Z | 2016-02-09T06:59:25.000Z | goclean.py | JerrieYuan/DrawItforSublimeText | 0ba8acd030a60bacd4a21227673b9af1005cad50 | [
"MIT"
] | null | null | null | goclean.py | JerrieYuan/DrawItforSublimeText | 0ba8acd030a60bacd4a21227673b9af1005cad50 | [
"MIT"
] | null | null | null | import sublime, sublime_plugin, os
class GocleanCommand(sublime_plugin.TextCommand):
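	"""Run `go install` on the package containing the current file (derived from its path under $GOPATH/src)."""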
def run(self, edit):
spl = "/"
filename = self.view.file_name()
filename = filename.replace("\\",spl)
gosrcpath = os.path.join(os.getenv("GOPATH"),"src"+spl)
gosrcpath = gosrcpath.replace("\\",spl)
thispkg = filename.replace(gosrcpath,"")
pathlist = thispkg.split(spl)
num = len(pathlist)
thispkg = thispkg.replace(spl+pathlist[num-1],"")
thispkg = os.path.normpath(thispkg)
os.popen("go install "+ thispkg)
print("go clean: go install "+ thispkg)
| 37.294118 | 63 | 0.616719 |
ae69001a60a880ef66603254580a7971a6c7e865 | 8,736 | cs | C# | src/Analyzers/Core/Analyzers/UseIsNullCheck/AbstractUseIsNullForReferenceEqualsDiagnosticAnalyzer.cs | belav/roslyn | 01124c8bbeacb560271261e97c10317114836299 | [
"MIT"
] | null | null | null | src/Analyzers/Core/Analyzers/UseIsNullCheck/AbstractUseIsNullForReferenceEqualsDiagnosticAnalyzer.cs | belav/roslyn | 01124c8bbeacb560271261e97c10317114836299 | [
"MIT"
] | null | null | null | src/Analyzers/Core/Analyzers/UseIsNullCheck/AbstractUseIsNullForReferenceEqualsDiagnosticAnalyzer.cs | belav/roslyn | 01124c8bbeacb560271261e97c10317114836299 | [
"MIT"
] | null | null | null | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis.CodeStyle;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.LanguageServices;
namespace Microsoft.CodeAnalysis.UseIsNullCheck
{
internal abstract class AbstractUseIsNullCheckForReferenceEqualsDiagnosticAnalyzer<TLanguageKindEnum>
: AbstractBuiltInCodeStyleDiagnosticAnalyzer where TLanguageKindEnum : struct
{
protected AbstractUseIsNullCheckForReferenceEqualsDiagnosticAnalyzer(
LocalizableString title
)
: base(
IDEDiagnosticIds.UseIsNullCheckDiagnosticId,
EnforceOnBuildValues.UseIsNullCheck,
CodeStyleOptions2.PreferIsNullCheckOverReferenceEqualityMethod,
title,
new LocalizableResourceString(
nameof(AnalyzersResources.Null_check_can_be_simplified),
AnalyzersResources.ResourceManager,
typeof(AnalyzersResources)
)
) { }
public override DiagnosticAnalyzerCategory GetAnalyzerCategory() =>
DiagnosticAnalyzerCategory.SemanticSpanAnalysis;
protected override void InitializeWorker(AnalysisContext context) =>
context.RegisterCompilationStartAction(
compilationContext =>
{
var objectType = compilationContext.Compilation.GetSpecialType(
SpecialType.System_Object
);
if (objectType != null)
{
var referenceEqualsMethod = objectType
.GetMembers(nameof(ReferenceEquals))
.OfType<IMethodSymbol>()
.FirstOrDefault(
m =>
m.DeclaredAccessibility == Accessibility.Public
&& m.Parameters.Length == 2
);
if (referenceEqualsMethod != null)
{
var syntaxKinds = GetSyntaxFacts().SyntaxKinds;
context.RegisterSyntaxNodeAction(
c => AnalyzeSyntax(c, referenceEqualsMethod),
syntaxKinds.Convert<TLanguageKindEnum>(
syntaxKinds.InvocationExpression
)
);
}
}
}
);
protected abstract bool IsLanguageVersionSupported(ParseOptions options);
protected abstract bool IsUnconstrainedGenericSupported(ParseOptions options);
protected abstract ISyntaxFacts GetSyntaxFacts();
private void AnalyzeSyntax(
SyntaxNodeAnalysisContext context,
IMethodSymbol referenceEqualsMethod
)
{
var cancellationToken = context.CancellationToken;
var semanticModel = context.SemanticModel;
var syntaxTree = semanticModel.SyntaxTree;
if (!IsLanguageVersionSupported(syntaxTree.Options))
{
return;
}
var option = context.GetOption(
CodeStyleOptions2.PreferIsNullCheckOverReferenceEqualityMethod,
semanticModel.Language
);
if (!option.Value)
{
return;
}
var invocation = context.Node;
var syntaxFacts = GetSyntaxFacts();
var expression = syntaxFacts.GetExpressionOfInvocationExpression(invocation);
var nameNode = syntaxFacts.IsIdentifierName(expression)
? expression
: syntaxFacts.IsSimpleMemberAccessExpression(expression)
? syntaxFacts.GetNameOfMemberAccessExpression(expression)
: null;
if (!syntaxFacts.IsIdentifierName(nameNode))
{
return;
}
syntaxFacts.GetNameAndArityOfSimpleName(nameNode, out var name, out _);
if (!syntaxFacts.StringComparer.Equals(name, nameof(ReferenceEquals)))
{
return;
}
var arguments = syntaxFacts.GetArgumentsOfInvocationExpression(invocation);
if (arguments.Count != 2)
{
return;
}
if (
!MatchesPattern(syntaxFacts, arguments[0], arguments[1])
&& !MatchesPattern(syntaxFacts, arguments[1], arguments[0])
)
{
return;
}
var symbol = semanticModel.GetSymbolInfo(invocation, cancellationToken).Symbol;
if (!referenceEqualsMethod.Equals(symbol))
{
return;
}
var properties = ImmutableDictionary<string, string>.Empty.Add(
UseIsNullConstants.Kind,
UseIsNullConstants.ReferenceEqualsKey
);
var genericParameterSymbol = GetGenericParameterSymbol(
syntaxFacts,
semanticModel,
arguments[0],
arguments[1],
cancellationToken
);
if (genericParameterSymbol != null)
{
if (genericParameterSymbol.IsValueType)
{
// 'is null' would generate error CS0403: Cannot convert null to type parameter 'T' because it could be a non-nullable value type. Consider using 'default(T)' instead.
// '== null' would generate error CS0019: Operator '==' cannot be applied to operands of type 'T' and '<null>'
// 'Is Nothing' would generate error BC30020: 'Is' operator does not accept operands of type 'T'. Operands must be reference or nullable types.
return;
}
// HasReferenceTypeConstraint returns false for base type constraint.
// IsReferenceType returns true.
if (
!genericParameterSymbol.IsReferenceType
&& !IsUnconstrainedGenericSupported(syntaxTree.Options)
)
{
// Needs special casing for C# as long as
// 'is null' over unconstrained generic is implemented in C# 8.
properties = properties.Add(UseIsNullConstants.UnconstrainedGeneric, "");
}
}
var additionalLocations = ImmutableArray.Create(invocation.GetLocation());
var negated = syntaxFacts.IsLogicalNotExpression(invocation.Parent);
if (negated)
{
properties = properties.Add(UseIsNullConstants.Negated, "");
}
var severity = option.Notification.Severity;
context.ReportDiagnostic(
DiagnosticHelper.Create(
Descriptor,
nameNode.GetLocation(),
severity,
additionalLocations,
properties
)
);
}
private static ITypeParameterSymbol? GetGenericParameterSymbol(
ISyntaxFacts syntaxFacts,
SemanticModel semanticModel,
SyntaxNode node1,
SyntaxNode node2,
CancellationToken cancellationToken
)
{
var valueNode = syntaxFacts.IsNullLiteralExpression(
syntaxFacts.GetExpressionOfArgument(node1)
)
? node2
: node1;
var argumentExpression = syntaxFacts.GetExpressionOfArgument(valueNode);
if (argumentExpression != null)
{
var parameterType =
semanticModel.GetTypeInfo(argumentExpression, cancellationToken).Type;
return parameterType as ITypeParameterSymbol;
}
return null;
}
private static bool MatchesPattern(
ISyntaxFacts syntaxFacts,
SyntaxNode node1,
SyntaxNode node2
) =>
syntaxFacts.IsNullLiteralExpression(syntaxFacts.GetExpressionOfArgument(node1))
&& !syntaxFacts.IsNullLiteralExpression(syntaxFacts.GetExpressionOfArgument(node2));
}
}
| 39.174888 | 187 | 0.555975 |
66471c1bcf910bbfac3a5d1798572d1132da56b2 | 14,811 | py | Python | roverpro/rover_data.py | RoverRobotics/openrover_python | bbbd24596db9f1c4e5a57d92fca048e289b668f0 | [
"BSD-3-Clause"
] | 1 | 2020-05-20T18:43:28.000Z | 2020-05-20T18:43:28.000Z | roverpro/rover_data.py | RoverRobotics/openrover_python | bbbd24596db9f1c4e5a57d92fca048e289b668f0 | [
"BSD-3-Clause"
] | 3 | 2019-04-22T21:48:07.000Z | 2020-06-17T19:10:04.000Z | roverpro/rover_data.py | RoverRobotics/openrover_python | bbbd24596db9f1c4e5a57d92fca048e289b668f0 | [
"BSD-3-Clause"
] | 1 | 2020-08-16T21:40:00.000Z | 2020-08-16T21:40:00.000Z | import abc
import enum
import functools
import re
from typing import NamedTuple, Optional
class ReadDataFormat(abc.ABC):
python_type = None
@abc.abstractmethod
def description(self):
raise NotImplementedError
@abc.abstractmethod
def unpack(self, b: bytes):
raise NotImplementedError
class WriteDataFormat(abc.ABC):
python_type = None
@abc.abstractmethod
def description(self):
raise NotImplementedError
@abc.abstractmethod
def pack(self, value) -> bytes:
raise NotImplementedError
class IntDataFormat(ReadDataFormat, WriteDataFormat):
def __init__(self, nbytes, signed):
self.nbytes = nbytes
self.signed = signed
def description(self):
s = "signed" if self.signed else "unsigned"
n = self.nbytes * 8
return f"{s} integer ({n} bits)"
def pack(self, value):
return int(value).to_bytes(self.nbytes, byteorder="big", signed=self.signed)
def unpack(self, b: bytes):
return int.from_bytes(b, byteorder="big", signed=self.signed)
ROVER_LEGACY_VERSION = 40621
@functools.total_ordering
class RoverFirmwareVersion(NamedTuple):
@classmethod
def parse(cls, a_str):
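        """Parse a version string of the form major[.minor[.patch]] (an optional -prerelease/+build suffix is accepted but ignored)."""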
ver_re = re.compile(r"(\d+(?:[.]\d+){0,2})(?:-([^+])+)?(?:[+](.+))?", re.VERBOSE)
match = ver_re.fullmatch(a_str)
if match is None:
raise ValueError
        # group(1) is the numeric major[.minor[.patch]] portion; group(0) would also
        # include any prerelease/build suffix and break int().
        parts = [int(p) for p in match.group(1).split(".")]
return RoverFirmwareVersion(*parts)
major: int
minor: int = 0
patch: int = 0
build: str = ""
prerelease: str = ""
@property
def value(self):
return self.major * 10000 + self.minor * 100 + self.patch * 10
def __lt__(self, other):
return (self.major, self.minor, self.patch, other.prerelease) < (
other.major,
other.minor,
other.patch,
self.prerelease,
)
def __str__(self):
return (
f"{self.major}.{self.minor}.{self.patch}"
+ (("-" + self.prerelease) if self.prerelease else "")
+ (("+" + self.build) if self.build else "")
)
class DataFormatFirmwareVersion(ReadDataFormat):
python_type = RoverFirmwareVersion
def unpack(self, b):
v = UINT16.unpack(b)
if v == ROVER_LEGACY_VERSION:
return RoverFirmwareVersion(1, 0, 0)
return RoverFirmwareVersion(v // 10000, v // 100 % 100, v % 10)
def description(self):
return (
"XYYZZ, where X=major version, Y=minor version, Z = patch version."
"e.g. 10502 = version 1.05.02. The special value 16421 represents pre-1.3 versions"
)
class DataFormatChargerState(ReadDataFormat, WriteDataFormat):
CHARGER_ACTIVE_MAGIC_BYTES = bytes.fromhex("dada")
CHARGER_INACTIVE_MAGIC_BYTES = bytes.fromhex("0000")
python_type = bool
def pack(self, value):
if value:
return self.CHARGER_ACTIVE_MAGIC_BYTES
else:
return self.CHARGER_INACTIVE_MAGIC_BYTES
def unpack(self, b):
return bytes(b) == self.CHARGER_ACTIVE_MAGIC_BYTES
def description(self):
return "0xDADA if charging, else 0x0000"
class BatteryStatus(enum.Flag):
overcharged_alarm = enum.auto()
terminate_charge_alarm = enum.auto()
over_temp_alarm = enum.auto()
terminate_discharge_alarm = enum.auto()
remaining_capacity_alarm = enum.auto()
remaining_time_alarm = enum.auto()
initialized = enum.auto()
discharging = enum.auto()
fully_charged = enum.auto()
fully_discharged = enum.auto()
class DataFormatBatteryStatus(ReadDataFormat):
python_type = BatteryStatus
def unpack(self, b: bytes):
assert len(b) == 2
as_int = int.from_bytes(b, byteorder="big", signed=False)
result = BatteryStatus(0)
for mask, val in (
(0x8000, BatteryStatus.overcharged_alarm),
(0x4000, BatteryStatus.terminate_charge_alarm),
(0x1000, BatteryStatus.over_temp_alarm),
(0x0800, BatteryStatus.terminate_discharge_alarm),
(0x0200, BatteryStatus.remaining_capacity_alarm),
(0x0100, BatteryStatus.remaining_time_alarm),
(0x0080, BatteryStatus.initialized),
(0x0040, BatteryStatus.discharging),
(0x0020, BatteryStatus.fully_charged),
(0x0010, BatteryStatus.fully_discharged),
):
if as_int & mask:
result |= val
return result
def description(self):
return "bit flags"
class DriveMode(enum.IntEnum):
OPEN_LOOP = 0
CLOSED_LOOP = 1
UINT16 = IntDataFormat(2, False)
INT16 = IntDataFormat(2, True)
UINT8 = IntDataFormat(1, signed=False)
class DataFormatFixedPrecision(ReadDataFormat, WriteDataFormat):
"""A fractional number packed as an integer, but representing a fractional number"""
def __init__(self, base_type, step=1.0, zero=0.0):
self.base_type = base_type
# a change of 1 in the python type corresponds to a change of this many in the base type
self.step = step
# the value of 0 in the python type corresponds to this value in the base type
self.zero = zero
def unpack(self, b: bytes):
n = self.base_type.unpack(b)
return (n - self.zero) / self.step
def pack(self, p):
n = round(p * self.step + self.zero)
return self.base_type.pack(n)
def description(self):
return "fractional (resolution=1/{}, zero={}) stored as {}".format(
self.step, self.zero, self.base_type.description()
)
class DataFormatDriveMode(ReadDataFormat):
python_type = DriveMode
def unpack(self, b: bytes):
return DriveMode(UINT16.unpack(b))
def pack(self, p: DriveMode):
return UINT16.pack(p.value)
def description(self):
return DriveMode.__doc__
OLD_CURRENT_FORMAT = DataFormatFixedPrecision(UINT16, 34)
SIGNED_MILLIS_FORMAT = DataFormatFixedPrecision(INT16, 1000)
UNSIGNED_MILLIS_FORMAT = DataFormatFixedPrecision(UINT16, 1000)
OLD_VOLTAGE_FORMAT = DataFormatFixedPrecision(UINT16, 58)
FAN_SPEED_RESPONSE_FORMAT = DataFormatFixedPrecision(UINT16, 240)
DECIKELVIN_FORMAT = DataFormatFixedPrecision(UINT16, 10, zero=2731.5)
PERCENTAGE_FORMAT = DataFormatFixedPrecision(UINT16, 100)
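# For example, PERCENTAGE_FORMAT packs 0.5 as the integer 50 (b"\x00\x32") and
# unpacks b"\x00\x32" back to 0.5.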
MOTOR_EFFORT_FORMAT = DataFormatFixedPrecision(UINT8, 125, 125)
CHARGER_STATE_FORMAT = DataFormatChargerState()
FIRMWARE_VERSION_FORMAT = DataFormatFirmwareVersion()
DRIVE_MODE_FORMAT = DataFormatDriveMode()
BATTERY_STATUS_FORMAT = DataFormatBatteryStatus()
class MotorStatusFlag(enum.Flag):
NONE = 0
FAULT1 = enum.auto()
FAULT2 = enum.auto()
DECAY_MODE = enum.auto()
REVERSE = enum.auto()
BRAKE = enum.auto()
COAST = enum.auto()
class DataFormatMotorStatus(ReadDataFormat):
def description(self):
return "motor status bit flags"
def unpack(self, b: bytes):
u = UINT16.unpack(b)
bit_meanings = [
MotorStatusFlag.FAULT1,
MotorStatusFlag.FAULT2,
MotorStatusFlag.DECAY_MODE,
MotorStatusFlag.REVERSE,
MotorStatusFlag.BRAKE,
MotorStatusFlag.COAST,
]
        # Reject values with bits set beyond the known flags.
        if u.bit_length() > len(bit_meanings):
            raise ValueError("too many bits to unpack")
result = MotorStatusFlag.NONE
for i, flag in enumerate(bit_meanings):
if u & 1 << i:
result |= flag
return result
class DataFormatIgnored(WriteDataFormat):
def description(self):
return f"Ignored data {self.n_bytes} bytes long"
def pack(self, value=None) -> bytes:
assert value is None
return bytes(self.n_bytes)
def __init__(self, n_bytes):
self.n_bytes = n_bytes
class SystemFaultFlag(enum.Flag):
NONE = 0
OVERSPEED = enum.auto()
OVERCURRENT = enum.auto()
class DataFormatSystemFault(ReadDataFormat):
def description(self):
return "System fault bit flags"
def unpack(self, b: bytes):
u = UINT16.unpack(b)
bit_meanings = [SystemFaultFlag.OVERSPEED, SystemFaultFlag.OVERCURRENT]
        # Reject values with bits set beyond the known flags.
        if u.bit_length() > len(bit_meanings):
            raise ValueError("too many bits to unpack")
result = SystemFaultFlag.NONE
for i, flag in enumerate(bit_meanings):
if u & 1 << i:
result |= flag
return result
class DataElement:
def __init__(
self,
index: int,
data_format: ReadDataFormat,
name: str,
description: str = None,
not_implemented: bool = False,
since: Optional[str] = None,
until: Optional[str] = None,
):
self.index = index
self.data_format = data_format
self.name = name
self.description = description
self.not_implemented = not_implemented
self.since_version = None if since is None else RoverFirmwareVersion.parse(since)
self.until_version = None if until is None else RoverFirmwareVersion.parse(until)
def supported(self, version):
if isinstance(version, str):
v = RoverFirmwareVersion.parse(version)
elif isinstance(version, RoverFirmwareVersion):
v = version
else:
raise TypeError(
f"Expected string or {type(RoverFirmwareVersion)}, but got {type(version)}"
)
if self.not_implemented:
return False
if self.since_version is not None and v < self.since_version:
return False
if self.until_version is not None:
if self.until_version <= v:
return False
return True
elements = [
DataElement(
0, OLD_CURRENT_FORMAT, "battery (A+B) current (external)", "total current from batteries"
),
DataElement(2, UINT16, "left motor speed", not_implemented=True),
DataElement(4, UINT16, "right motor speed", not_implemented=True),
DataElement(
6,
UINT16,
"flipper position 1",
"flipper position sensor 1. 0=15 degrees; 1024=330 degrees;",
),
DataElement(
8,
UINT16,
"flipper position 2",
"flipper position sensor 2. 0=15 degrees; 1024=330 degrees;",
),
DataElement(10, OLD_CURRENT_FORMAT, "left motor current"),
DataElement(12, OLD_CURRENT_FORMAT, "right motor current"),
DataElement(
14,
UINT16,
"left motor encoder count",
"May overflow or underflow. Increments when motor driven forward, decrements backward",
since="1.4",
),
DataElement(
16,
UINT16,
"right motor encoder count",
"May overflow or underflow. Increments when motor driven forward, decrements backward",
since="1.4",
),
DataElement(18, UINT16, "motors fault flag", not_implemented=True),
DataElement(20, UINT16, "left motor temperature"),
DataElement(22, UINT16, "right motor temperature", not_implemented=True),
DataElement(24, OLD_VOLTAGE_FORMAT, "battery A voltage (external)"),
DataElement(26, OLD_VOLTAGE_FORMAT, "battery B voltage (external)"),
DataElement(
28,
UINT16,
"left motor encoder interval",
"0 when motor stopped. Else proportional to motor period (inverse motor speed)",
),
DataElement(
30,
UINT16,
"right motor encoder interval",
"0 when motor stopped. Else proportional to motor period (inverse motor speed)",
),
DataElement(
32,
UINT16,
"flipper motor encoder interval",
"0 when motor stopped. Else proportional to motor period (inverse motor speed)",
not_implemented=True,
),
DataElement(
34,
PERCENTAGE_FORMAT,
"battery A state of charge",
"Proportional charge, 0.0=empty, 1.0=full",
),
DataElement(
36,
PERCENTAGE_FORMAT,
"battery B state of charge",
"Proportional charge, 0.0=empty, 1.0=full",
),
DataElement(38, CHARGER_STATE_FORMAT, "battery charging state"),
DataElement(40, FIRMWARE_VERSION_FORMAT, "release version"),
DataElement(42, OLD_CURRENT_FORMAT, "battery A current (external)"),
DataElement(44, OLD_CURRENT_FORMAT, "battery B current (external)"),
DataElement(46, UINT16, "motor flipper angle"),
DataElement(48, FAN_SPEED_RESPONSE_FORMAT, "fan speed"),
DataElement(50, DRIVE_MODE_FORMAT, "drive mode", until="1.7"),
DataElement(52, BATTERY_STATUS_FORMAT, "battery A status", since="1.2"),
DataElement(54, BATTERY_STATUS_FORMAT, "battery B status", since="1.2"),
DataElement(56, UINT16, "battery A mode", since="1.2"),
DataElement(58, UINT16, "battery B mode", since="1.2"),
DataElement(60, DECIKELVIN_FORMAT, "battery A temperature (internal)", since="1.2"),
DataElement(62, DECIKELVIN_FORMAT, "battery B temperature (internal)", since="1.2"),
DataElement(64, UNSIGNED_MILLIS_FORMAT, "battery A voltage (internal)", since="1.2"),
DataElement(66, UNSIGNED_MILLIS_FORMAT, "battery B voltage (internal)", since="1.2"),
DataElement(
68,
SIGNED_MILLIS_FORMAT,
"battery A current (internal)",
">0 = charging; <0 = discharging",
since="1.2",
),
DataElement(
70,
SIGNED_MILLIS_FORMAT,
"battery B current (internal)",
">0 = charging; <0 = discharging",
since="1.2",
),
DataElement(72, DataFormatMotorStatus(), "left motor status", since="1.7"),
DataElement(74, DataFormatMotorStatus(), "right motor status", since="1.7"),
DataElement(76, DataFormatMotorStatus(), "flipper motor status", since="1.7"),
DataElement(78, FAN_SPEED_RESPONSE_FORMAT, "fan 1 duty", since="1.9"),
DataElement(80, FAN_SPEED_RESPONSE_FORMAT, "fan 2 duty", since="1.9"),
DataElement(82, DataFormatSystemFault(), "system fault flags", since="1.10"),
]
ROVER_DATA_ELEMENTS = {e.index: e for e in elements}
def strike(s):
return f"~~{s}~~"
def doc():
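    """Render the data-element table as a Markdown table (unimplemented entries struck through)."""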
lines = ["| # | Name | Data Type | Description |", "| - | ---- | --------- | ----------- |"]
for de in elements:
lines.append(
"|"
+ "|".join(
[
                    strike(de.index) if de.not_implemented else str(de.index),
                    de.name,
                    de.data_format.description(),
                    de.description or "",
]
)
+ "|"
)
return "\n".join(lines)
if __name__ == "__main__":
print(doc())
def fix_encoder_delta(delta):
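    """Wrap a raw 16-bit encoder count difference into a signed delta in [-32768, 32767]."""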
MAX_ENCODER = 2 ** 16
delta %= MAX_ENCODER
if delta < MAX_ENCODER / 2:
return delta
else:
return delta - MAX_ENCODER
| 30.85625 | 97 | 0.629735 |
1a3f153b6496e72131bae8cb8f13d77c521410a0 | 42,940 | py | Python | classification/ecrire2.py | kachaloali/m1Stage | 9fd254637dca0aea7f0e930164079fd18c499284 | [
"MIT"
] | null | null | null | classification/ecrire2.py | kachaloali/m1Stage | 9fd254637dca0aea7f0e930164079fd18c499284 | [
"MIT"
] | null | null | null | classification/ecrire2.py | kachaloali/m1Stage | 9fd254637dca0aea7f0e930164079fd18c499284 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import commands
import operator
from optparse import OptionParser
parser = OptionParser(usage="%prog -f FILE,FILE,... -o FILE -i FASTA -a ATTR -p PATH")
parser.add_option("-f", "--files", dest="files",help ="The classification files separated by commas")
parser.add_option("-o", "--out", dest="out",help ="The output file name")
parser.add_option("-i", "--fas", dest="fas",help ="The fasta file name")
parser.add_option("-a", "--attr", dest="attr",help ="The attibutes file PDB")
parser.add_option("-p", "--path", dest="path",help ="Path to programs")
(args, options) = parser.parse_args()
def main(files=args.files,output=args.out,fas=args.fas,attr=args.attr, pathToProg=args.path):
#We retrieve the names of the classification files
if ',' in files: files = files.split(',')
else: files = files.split()
diQueriesSeq, diNewFamily, param = {}, {}, []
diAttrib, diNumSeqAndIdSeq = getIdSeqNumSeqAndColrs(fas,attr)
fastaFileName = fas.replace('.fasta', '')
if os.path.exists(fastaFileName +'_rClassif/'): print commands.getoutput('rm -r '+ fastaFileName +'_rClassif/')
print commands.getoutput('mkdir '+ fastaFileName +'_rClassif/')
####################################################################################################
#We retrieve only the test sequences
for idSeq, comment in diAttrib.items():
if comment == 'black' :
diQueriesSeq[idSeq]=[]
for i in range(len(files)):
diQueriesSeq[idSeq].append([[], []])
#For each file we replace each space with a line break and then retrieve the parameters of the file
for ifile in files:
print commands.getoutput("cat "+ ifile +" | tr \' \' \'\n\' > "+ ifile +'bis')
print commands.getoutput("rm "+ ifile)
print commands.getoutput("mv "+ ifile +'bis '+ ifile)
#looking for the parameters
liste, index1 = [], 0
if "_" in ifile: liste = ifile.split("_")
elem = [ elt for elt in liste if "-classif" in elt ]
for elt in liste:
if "-classif" not in elt: index1 += len(elt) + 1
else: index2 = elt.find('-classif')
index2 += index1
param.append(ifile[index1:index2])
###################################################################################################
"""
	Here, if several classification files are submitted, we go through each file and collect
	the information it provides. A sequence may be classified according to one classification file, and may not be
classified according to another file. It depends on the parameters used for the construction of these files.
The parameters are those used since the alignment step (paloma)
"""
diFile_concepts, counter = {}, 0
for ifile in files:
fileName, diBlocks, diTriFile, diClassement = ifile, {}, {}, {}
xfile = open(ifile, 'r')
if "/" in fileName:
chemin = fileName.split('/')[1]
else:
chemin = os.getcwd()
lines = xfile.read().split('Answer:')[-1]
for iSeq in diQueriesSeq: diClassement[iSeq] = []
#=========================================================================================
if 'Optimization' in lines: lines = lines.split('Optimization')[0]; print 'Optimisation...'
elif 'Models' in lines: lines = lines.split('Models')[0]; print 'Models...'
#=========================================================================================
bestclassified = list(filter(lambda line: 'bestclassified' in line.strip().split('(') and ',' in line.strip().split('(')[1], lines.split()))
classified = list(filter(lambda line: 'classified' in line.strip().split('(') and ',' in line.strip().split('(')[1], lines.split()))
bestambiguous = list(filter(lambda line: 'bestambiguous' in line.strip().split('(') and ',' in line.strip().split('(')[1], lines.split()))
ambiguous = list(filter(lambda line: 'ambiguous' in line.strip().split('(') and ',' in line.strip().split('(')[1], lines.split()))
unclassified = list(filter(lambda line: 'unclassified' in line.strip().split('('), lines.split()))
new_family = list(filter(lambda line: 'support_new_family' in line, lines.split()))
#=========================================================================================
for line in bestclassified:
idSeq = (line.split(',')[0]).split('(')[1].strip('"')
if idSeq in diQueriesSeq:
diQueriesSeq[idSeq][counter][0].append(line.split(',')[1])
diQueriesSeq[idSeq][counter][1].append('best classified')
diClassement[idSeq].append(line.split(',')[1])
diTriFile[idSeq] = 6
for line in classified:
idSeq = (line.split(',')[0]).split('(')[1].strip('"')
if idSeq in diQueriesSeq:
diQueriesSeq[idSeq][counter][0].append(line.split(',')[1])
diQueriesSeq[idSeq][counter][1].append('classified')
diClassement[idSeq].append(line.split(',')[1])
diTriFile[idSeq] = 5
for line in bestambiguous:
idSeq = (line.split(',')[0]).split('(')[1].strip('"')
if idSeq in diQueriesSeq:
diQueriesSeq[idSeq][counter][0].append(line.split(',')[1])
diQueriesSeq[idSeq][counter][1].append('best ambiguous')
diClassement[idSeq].append(line.split(',')[1])
diTriFile[idSeq] = 3
for line in ambiguous:
idSeq = (line.split(',')[0]).split('(')[1].strip('"')
if idSeq in diQueriesSeq:
diQueriesSeq[idSeq][counter][0].append(line.split(',')[1])
diQueriesSeq[idSeq][counter][1].append('ambiguous')
diClassement[idSeq].append(line.split(',')[1])
diTriFile[idSeq] = 2
for line in unclassified:
idSeq = (line.split('("')[1]).strip('")')
if idSeq in diQueriesSeq:
diQueriesSeq[idSeq][counter][0].append('unclassified')
diQueriesSeq[idSeq][counter][1].append('')
diClassement[idSeq].append('unclassified')
diTriFile[idSeq] = 1
##################################################################################################
#Search for concepts, associated blocks & associated sequences
members_new = list(filter(lambda line: 'membernew(' in line, lines.split()))
blocks_new = list(filter(lambda line: 'blocknew(' in line, lines.split()))
test_quality = ['best classified', 'classified', 'best ambiguous', 'ambiguous', 'unclassified']
diConcept = {}
for line in new_family:
numConcept, iBlocks, iSeqs, infosConcept = (line.split('(')[1]).split(',')[0], [], [], []
#The blocks members of the concept per file
blocks_of_concept = list(filter(lambda line: 'blocknew('+numConcept+',' in line,blocks_new))
for iline in blocks_of_concept:
numBlock = iline.split(',')[1].strip(')')
iBlocks.append(numBlock)
infosConcept.append(iBlocks)
#The sequences members of the concept per file
members_new_concept = list(filter(lambda line: ','+ numConcept +')' in line, members_new))
for iline in members_new_concept:
idSeq = iline.split('(')[1].split(',')[0].strip('"')
#If the sequence is among the queries sequences
if idSeq in diQueriesSeq:
iSeqs.append(idSeq)
diQueriesSeq[idSeq][counter][0].append('new('+ numConcept +')')
if len(diQueriesSeq[idSeq][counter][1]) == 0:
diClassement[idSeq].append('new('+ numConcept +')')
diTriFile[idSeq] = 4
infosConcept.append(iSeqs)
diConcept[numConcept] = infosConcept
diFile_concepts['File_'+str(counter+1)] = diConcept
##################################################################################################
#Here we find the exception sequences ('except') if they exist.
for idSeq in diQueriesSeq:
if len(diQueriesSeq[idSeq][counter][0]) == 0:
diQueriesSeq[idSeq][counter][0].append('except')
diClassement[idSeq].append('except')
diTriFile[idSeq] = 0
#Sorting the dictionary in descending order
diTriFile = sorted(diTriFile.iteritems(), reverse=True, key=operator.itemgetter(1))
if "/" in fileName:
outPutFile=open(fastaFileName+'_rClassif/'+fileName.split('/')[2].replace('classif-out.lp','res')+'.csv','w')
else:
outPutFile=open(fastaFileName+'_rClassif/'+fileName.replace('classif-out.lp','res')+'.csv','w')
outPutFile.write('File: '+fastaFileName+', param: '+ param[counter]+'\n\n\n')
outPutFile.write('sequences , subfamily , quality \n\n'.upper())
#Writing results for each input classification file
for i in range(len(diTriFile)):
idSeq = diTriFile[i][0]
outPutFile.write(idSeq+ ',')
for Class in list(set(diClassement[idSeq])) : outPutFile.write(Class + ' ')
outPutFile.write(','+ str(diTriFile[i][1]))
outPutFile.write('\n')
xfileName = chemin+"/"+fastaFileName+"_"+param[counter]+"_plma.dot"
diBlocks = getBlocks(xfileName)
seqAndBlocks = getSeqAndInvolvedInBlocks(diNumSeqAndIdSeq,diBlocks)
#Writing blocks
outPutFile.write('\n\n news families \n\n\n'.upper())
if diConcept != {}:
outPutFile.write("Concepts ,Members,Number of sequences,Number of blocks, interesting blocks\n")
for numConcept, conceptInfos in diConcept.iteritems():
if conceptInfos[1] !=[]:
outPutFile.write(numConcept + ', ,'+ str(len(conceptInfos[1]))
+','+ str(len(conceptInfos[0])) +'\n')
for seq in list(set(conceptInfos[1])):
suite_of_block = ''
for numBlock in list(set(conceptInfos[0])):
if numBlock in seqAndBlocks[seq].keys():
suite_of_block += seqAndBlocks[seq][numBlock]+' '
outPutFile.write(","+ seq +',,,'+ suite_of_block+ "\n")
outPutFile.write('\n')
outPutFile.close()
#Part Coloring PLMA by Families
colorClassify(fas, attr, fileName, diQueriesSeq, diClassement, diConcept, param, counter, pathToProg)
counter += 1
xfile.close()
"""
Writing step for the .csv file of global results: each sequence is written to the file with its status, i.e.
classified, ambiguous, unclassified, etc. The subfamily field indicates the family (or families) in which it was classified.
"""
outPutFile = open(fastaFileName+'_rClassif/'+output[:len(output)-4]+'Global'+output[len(output)-4:], 'w')
outPutFile.write('File: '+fastaFileName+'\n\n\n')
outPutFile.write(' sequences , parameters , subfamily , quality \n\n'.upper())
for idSeq, infosSeq in diQueriesSeq.iteritems():
outPutFile.write(idSeq)
i = 0
for liste in infosSeq:
outPutFile.write(',' + param[i] + ',')
for Class in list(set(liste[0])) : outPutFile.write(Class + ' ')
if len(liste[1]) > 0:
outPutFile.write(',' + liste[1][0] + '\n')
else: outPutFile.write(', ' + '\n')
i +=1
outPutFile.write('\n')
#For the new family
outPutFile.write('\n\n news families \n\n\n'.upper())
for File, Concept in diFile_concepts.iteritems():
#=======================================================================================
numFile = File[File.find('_')+1:]
xfileName = chemin+"/"+fastaFileName+'_'+param[int(numFile)-1]+'_plma.dot'
diBlocks = getBlocks(xfileName)
seqAndBlocks = getSeqAndInvolvedInBlocks(diNumSeqAndIdSeq,diBlocks)
#=======================================================================================
if Concept != {}:
numFile = File[File.find('_')+1:]
outPutFile.write(File + ": param : " + param[int(numFile) - 1]
+ ",Concepts ,Members,Number of sequences,Number of blocks, interesting blocks\n")
for numConcept, conceptInfos in Concept.iteritems() :
if conceptInfos[1] !=[]:
outPutFile.write(','+ numConcept + ', ,'+ str(len(conceptInfos[1]))
+','+ str(len(conceptInfos[0])) +'\n')
for seq in conceptInfos[1]:
suite_of_block = ''
for numBlock in list(set(conceptInfos[0])):
if numBlock in seqAndBlocks[seq].keys():
suite_of_block +=seqAndBlocks[seq][numBlock]+' '
outPutFile.write(", ,"+ seq +',,,'+ suite_of_block+ "\n")
outPutFile.write('\n')
outPutFile.close()
#########################################################################################################
def getIdSeqNumSeqAndColrs(fas,attr):
"""
This function returns two dictionaries. In the first one, the keys are the sequence ids and the values are
the comments for each sequence. In the second one (diNumSeqAndIdSeq), the keys are the numbers of the sequences
in the PLMA file and the values are the identifiers of the corresponding sequences.
"""
with open(fas, 'r') as fFile:
fastaFile=fFile.readlines()
fFile.close()
with open(attr, 'r') as aFile:
attrFile=aFile.readlines()
aFile.close()
diQueriesSeq, diNumSeqAndIdSeq, numSeq = {}, {}, 0
for fLine in fastaFile:
if fLine[0] == '>':
numSeq += 1
if '|' in fLine:
idSeq = fLine.split('|')[1].strip()
else:
idSeq = fLine[1:].strip()
diQueriesSeq[idSeq] = ''
diNumSeqAndIdSeq[str(numSeq)] = idSeq
for aLine in attrFile:
if 'range=' in aLine and 'comments=' in aLine:
borneInf = int(aLine.split('"')[1].split('-')[0])
borneSup = int(aLine.split('"')[1].split('-')[1])
if (borneInf <= numSeq and numSeq <= borneSup):
diQueriesSeq[idSeq] = aLine.split('"')[5]
return diQueriesSeq, diNumSeqAndIdSeq
#################################################################################################
def getBlocks(dotFile):
"""
This function returns a dictionary of all the PLMA blocks contained in a dot file
"""
with open(dotFile, 'r') as fd:
dotfile = fd.readlines()
subClustersDico = {}
concatDotFile = reduce(lambda line1, line2: line1.strip()+line2.strip(), dotfile)
subClusters = concatDotFile.split('subgraph cluster_')
for subCluster in subClusters[3:]:
subClusterTemp = subCluster.split('{')[1].split('"];')[:-1]
tmp = subClusterTemp[0].strip().split(';')[2]
subClusterTemp[0] = tmp
subClustersDico[subCluster.split('{')[0]] = subClusterTemp
lastSubCluster = subClusters[len(subClusters)-1:]
lastSubClusterTemp = lastSubCluster[0].split('{')[1].split('}')[0].split('"];')[:-1]
tmp = lastSubClusterTemp[0].strip().split(';')[2]
lastSubClusterTemp[0] = tmp
subClustersDico[lastSubCluster[0].split('{')[0]] = lastSubClusterTemp
return subClustersDico
#################################################################################################
def getSeqAndInvolvedInBlocks(diNumSeq, diBlocks):
diSeqBlocks = {}
for numSeq, idSeq in diNumSeq.items():
dico = {}
for numblock, valueBlock in diBlocks.items():
for line in valueBlock:
if '"('+numSeq+', ' in line:
dico[numblock] = line.split('label = "')[1]
diSeqBlocks[idSeq] = dico
return diSeqBlocks
##################################################################################################
def getNumSeqAndColrs(attribFile):
"""
This function retrieves the sequence numbers and the colors of their families.
"""
attributs = open(attribFile,'r')
dico = {}
for line in attributs.readlines():
if 'range=' in line:
ranger = line.split('"')[1]
borneInf, borneSup = int(ranger.split('-')[0]), int(ranger.split('-')[1])
color = line.split('"')[3]
if borneInf > borneSup:
error = "In the range section, the '-' has to find "
error += "between two numbers, and the first number "
error += "has to be smaller than the second one!"
printError(error)
elif borneInf == borneSup:
numSeq = borneInf
dico[str(numSeq)] = color
else:
for numSeq in range(borneInf, borneSup+1):
dico[str(numSeq)] = color
attributs.close()
return dico
#################################################################################################
def colorClassify(fas, attr, fileName, diQueriesSeq, diClassement, diConcept, param, counter, pathToProg):
fastaFileName = fastaFileName = fas.replace('.fasta', '')
plma_seq1, plma_seq2 = getIdSeqNumSeqAndColrs(fas, attr)
known_family = [family for family in list(set(plma_seq1.values())) if family != 'black']
plma_seq3 = getNumSeqAndColrs(attr)
colorNewFamily = "burlywood"
colorAmbiguous = "olive"
colorUnclassified = "black"
diColor_of_family ={}
for family in known_family:
colors = []
for numSeq in plma_seq3:
if plma_seq1[plma_seq2[numSeq]] == family.upper():
colors.append(plma_seq3[numSeq])
diColor_of_family[family] = list(set(colors))
colored_seq_by_family = {}
for numSeq in plma_seq3:
if plma_seq1[plma_seq2[numSeq]] != colorUnclassified:
colored_seq_by_family[numSeq] = []
colored_seq_by_family[numSeq].append(plma_seq3[numSeq])
plma_seq2_temp = dict([[v,k] for v,k in plma_seq2.items()])
#Inverting a dictionary
invert_dict = dict([[v,k] for k,v in plma_seq2.items()])
plma_seq2 = invert_dict
for idSeq in plma_seq1:
if idSeq in diClassement:
numSeq = plma_seq2[idSeq]
colored_seq_by_family[numSeq] = []
for family, color_of_family in diColor_of_family.items():
if family.lower() in diClassement[idSeq]:
colored_seq_by_family[numSeq].append(color_of_family[0])
colored_seq_by_family_tmp = dict([[cle,val] for cle,val in colored_seq_by_family.items()])
#Give the color "colorNewFamily" for news families
for idSeq in diClassement:
for elem in diClassement[idSeq]:
if "new" in elem:
numSeq = plma_seq2[idSeq]
colored_seq_by_family[numSeq] = []
colored_seq_by_family[numSeq].append(colorNewFamily)
#Give the color "colorAmbiguous" for ambiguous
for numSeq, list_color in colored_seq_by_family.items():
if len(list_color) > 1:
colored_seq_by_family[numSeq] = []
colored_seq_by_family[numSeq].append(colorAmbiguous)
#pools of family
diFamily_by_colors = {}
list_tmp = [ elem[0] for elem in colored_seq_by_family.values() if elem != [] ]
if colorNewFamily in set(list_tmp):
diColor_of_family["new"] = [colorNewFamily]
#Reverse of the dictionary of families and their colors
invert_dict = dict([[v[0].lower(),k] for k,v in diColor_of_family.items()])
diColor_family = invert_dict
#A dictionary is created that contains the colors of the families and all the
#sequences belonging to families
for color_of_family in diColor_of_family.values():
NumSeqs = []
for numSeq, colorSeq in colored_seq_by_family.items():
if colorSeq != [] and colorSeq[0] == color_of_family[0]:
NumSeqs.append(numSeq)
diFamily_by_colors[color_of_family[0]] = NumSeqs
#Other unclassified sequences
unclassified_seqs, list_tmp2 = [], []
list_tmp1 = [ elem for elem in diFamily_by_colors.values()]
for liste in list_tmp1:
for elem in liste:
list_tmp2.append(elem)
list_tmp2 = list(set(list_tmp2))
for numSeq in plma_seq3:
if numSeq not in list_tmp2:
unclassified_seqs.append(numSeq)
#Looking for ambiguous sequences
ambiguous, reste_seqs, diClass = {}, {}, {}
for numSeq, tColor in colored_seq_by_family.items():
if numSeq in unclassified_seqs and tColor != []:
color = tColor[0]
ambiguous[numSeq] = color
elif numSeq in unclassified_seqs:
reste_seqs[numSeq] = colorUnclassified
for numSeq in unclassified_seqs:
color = colored_seq_by_family_tmp[numSeq]
if color != []: color = colored_seq_by_family_tmp[numSeq][0].lower()
else: color = ""
if color != "":
if numSeq in colored_seq_by_family_tmp:
classes = diColor_family[color]
for color in colored_seq_by_family_tmp[numSeq][1:]:
classes += ", " + diColor_family[color.lower()]
diClass[numSeq] = classes
#==================================================================================================================
#==================================================================================================================
dotInFile = "./"+fastaFileName+"_paloma/"+fastaFileName+"_"+param[counter]+"_plma.dot"
dotOutFile = "./"+fastaFileName+"_paloma/"+fastaFileName+"_"+param[counter]+"-col.dot"
#==================================================================================================================
#==================================================================================================================
dic_blocks = {}
lines = open(fileName, "r").readlines()
#Looking for the characteristic blocks for each family
for Class in diColor_of_family:
blocks_support = list(filter(lambda line: 'characteristic_block' in line and Class.lower() in line, lines))
blocks = []
for line in blocks_support:
block = line.split(",")[2].split(")")[0]
blocks.append(block)
dic_blocks[Class] = list(set(blocks))
diChar_blocks = {}
for Class, blocks in dic_blocks.items():
for block in blocks:
diChar_blocks[block] = Class
####################################################################################################################
#Creating a dictionary that contains all the clusters of the plma dot file
dotFile = open(dotInFile, "r").readlines()
subClustersDico, colorsSeq = {}, {}
concatDotFile = reduce(lambda line1, line2: line1.strip()+line2.strip(), dotFile)
subClusters = concatDotFile.split('subgraph cluster_')
for subCluster in subClusters[1:]:
subClusterTemp = subCluster.split('{')[1].split('"];')[:-1]
tmp = subClusterTemp[0].strip().split(';')[2]
subClusterTemp[0] = tmp
subClustersDico[subCluster.split('{')[0]] = subClusterTemp
lastSubCluster = subClusters[len(subClusters)-1:]
lastSubClusterTemp = lastSubCluster[0].split('{')[1].split('}')[0].split('"];')[:-1]
tmp = lastSubClusterTemp[0].strip().split(';')[2]
lastSubClusterTemp[0] = tmp
subClustersDico[lastSubCluster[0].split('{')[0]] = lastSubClusterTemp
infoSeqs = lastSubCluster[0].split('{')[1].split('}')[1].split('];')[:-1]
#===================================================================================================================
#===================================================================================================================
#The Input plmadot file
inputFile = open(dotInFile, "r")
#The output plmadot file
outputFile = open(dotOutFile, "w")
lines = inputFile.readlines()
for index, elem in enumerate(lines):
if "subgraph" in elem:
if elem.strip() == "subgraph cluster_1":
index1 = index
if elem.strip() == "subgraph cluster_2":
index2 = index
if elem.strip() == "subgraph cluster_3":
index3 = index
head = lines[:index1]
cluster1 = lines[index1:index2]
cluster2 = lines[index2:index3]
#The sequences numbers and their labels
diCluster1_tmp = {}
for line in cluster1:
if 'label' in line:
numSeq = line.split(",")[0].split('(')[1]
label = line.split(')"')[1]
diCluster1_tmp[numSeq] = label
diCluster2_tmp = {}
for line in cluster2:
if 'label' in line:
numSeq = line.split(",")[0].split('(')[1]
diCluster2_tmp[numSeq] = line
#===================================================================================================================
#===================================================================================================================
#The head of the dot is written
for line in head:
outputFile.write(line)
#===================================================================================================================
#===================================================================================================================
#Part for cluster 1
for line in cluster1:
if "cluster" in line:
outputFile.write(line)
outputFile.write("{\n")
elif "node" in line:
colorSeq = line.split('color =')[1].strip().split(',')[0]
line = line.replace(colorSeq.strip(), "black")
outputFile.write(line)
elif "style" in line:
style_of_cluster = line.split("style =")[1].split(";")[0]
line = line.replace(style_of_cluster.strip(), "filled")
outputFile.write(line)
#Writing for the sub-families (cluster 1)
i = 1
allNewBlocks = []
for color, NumSeqs in diFamily_by_colors.items():
if color != colorNewFamily:
outputFile.write("subgraph cluster_" + str(i) +"p1 \n")
outputFile.write("{\n")
outputFile.write("label = \"Family: "+ diColor_family[color.lower()] +"\nNumber: "+ str(i) +"\";\n")
outputFile.write("node [shape = record, color = black, fontcolor = black];\n")
for numSeq in NumSeqs:
if plma_seq2_temp[numSeq] in diQueriesSeq:
line = diCluster1_tmp[numSeq].replace("\"];", " [**]\"];")
outputFile.write('"('+numSeq+', 1, 0)"' + line)
else: outputFile.write('"('+numSeq+', 1, 0)"' + diCluster1_tmp[numSeq])
outputFile.write('}\n')
i += 1
#Case for pools of new families (if there are several)
else:
i = 1
for concept, infosConcept in diConcept.iteritems():
outputFile.write("subgraph cluster_new" + str(i) +" \n")
outputFile.write("{\n")
outputFile.write("label = \"Family: "+ diColor_family[color.lower()] + "\nNumber: "+ str(i)
+"\";\n")
outputFile.write("node [shape = record, color = black, fontcolor = black];\n")
for idSeq in infosConcept[1]:
numSeq = plma_seq2[idSeq]
if idSeq in diQueriesSeq:
line = diCluster1_tmp[numSeq].replace("\"];", " [**]\"];")
outputFile.write('"('+numSeq+', 1, 0)"' + line)
else: outputFile.write('"('+numSeq+', 1, 0)"' + diCluster1_tmp[numSeq])
outputFile.write('}\n')
allNewBlocks += list(set(infosConcept[0]))
i += 1
#We add the characteristic blocks of the new families
for bloc in allNewBlocks:
diChar_blocks[bloc] = "new"
#The rest of the sequences (cluster 1)
for line in cluster1:
if 'label' in line: numSeq = line.split(",")[0].split('(')[1]
if numSeq in unclassified_seqs:
color = colored_seq_by_family_tmp[numSeq]
if color != []:
color = colored_seq_by_family_tmp[numSeq][0].lower()
else: color = ""
if color != "":
if numSeq in colored_seq_by_family_tmp:
classes = diColor_family[color]
for color in colored_seq_by_family_tmp[numSeq][1:]:
classes += ", " + diColor_family[color.lower()]
line = line.replace(numSeq+ ':', "[" + classes.upper() +"] "+ numSeq+":")
if plma_seq2_temp[numSeq] in diQueriesSeq:
line = line.replace("\"];", " [**]\"];")
outputFile.write(line)
else:
if plma_seq2_temp[numSeq] in diQueriesSeq:
line = line.replace("\"];", " [**]\"];")
outputFile.write(line)
outputFile.write("}\n")
#=================================================================================================================
#=================================================================================================================
#Part for cluster2
for line in cluster2:
if "cluster" in line:
outputFile.write(line)
outputFile.write("{\n")
elif "node" in line:
colorSeq = line.split('color =')[1].strip().split(',')[0]
line = line.replace(colorSeq.strip(), "black")
outputFile.write(line)
elif "style" in line:
style_of_cluster = line.split("style =")[1].split(";")[0]
line = line.replace(style_of_cluster.strip(), "filled")
outputFile.write(line)
outputFile.write("fontcolor = gray;\n")
#Writing for the sub-families (cluster 2)
i = 1
for color, NumSeqs in diFamily_by_colors.items():
if color != colorNewFamily:
outputFile.write("subgraph cluster_" + str(i) +"p2 \n")
outputFile.write("{\n")
outputFile.write("node [shape = record,style = filled, color = "+color.lower()
+", fontcolor = black];\n")
outputFile.write("color = "+color.lower()+";\n")
for numSeq in NumSeqs:
outputFile.write(diCluster2_tmp[numSeq])
outputFile.write('}\n')
i += 1
else:
i = 1
for concept, infosConcept in diConcept.iteritems():
outputFile.write("subgraph cluster_new" + str(i) +"\n")
outputFile.write("{\n")
outputFile.write("node [shape = record,style = filled, color = "+color.lower()
+", fontcolor = black];\n")
outputFile.write("color = "+color.lower()+";\n")
for idSeq in infosConcept[1]:
numSeq = plma_seq2[idSeq]
outputFile.write(diCluster2_tmp[numSeq])
outputFile.write('}\n')
i += 1
#The rest of the sequences (cluster 2)
for line in cluster2:
if 'label' in line: numSeq = line.split(",")[0].split('(')[1]
if numSeq in unclassified_seqs: outputFile.write(line)
outputFile.write("}\n")
#=================================================================================================================
#=================================================================================================================
#Part for the rest of the clusters (PLMA blocks)
for numCluster, cluster in subClustersDico.items():
if numCluster in diChar_blocks:
outputFile.write("subgraph cluster_"+numCluster+"\n{\n")
outputFile.write("node [shape = record, style = filled, color = yellow, fontcolor = black];\n")
outputFile.write("color = "+diColor_of_family[diChar_blocks[numCluster]][0].lower()+";\n")
for line in cluster:
numSeq = line.split(",")[0].split("(")[1]
outputFile.write(line + "\"];\n")
outputFile.write("}\n")
elif numCluster not in ["1","2"]:
outputFile.write("subgraph cluster_"+numCluster+"\n{\n")
outputFile.write("node [shape = record, style = filled, color = yellow, fontcolor = black];\n")
outputFile.write("color = black;\n")
for line in cluster:
outputFile.write(line+"\"];\n")
outputFile.write("}\n")
#Part for arrows
for line in infoSeqs:
if '->' in line:
numSeqTemp, numSeq = line.split('label = ')[1], ''
if ':' in line:
numSeq = numSeqTemp.split(':')[0].strip('"')
else:
numSeq = numSeqTemp.split(',')[0]
colorSeq = line.split(', color =')[1].strip().split(',')[0]
if numSeq in ambiguous:
line = line.replace("fontsize = 8","fontsize = 15")
line = line.replace("label = " + numSeq+ ',', "label = "+ numSeq +"("+ diClass[numSeq].upper()+")\"")
line = line.replace(colorSeq.strip(), ambiguous[numSeq].lower())
elif numSeq in reste_seqs:
color = plma_seq3[numSeq].lower()
if color != colorUnclassified:
classe = diColor_family[color]
line = line.replace("label = "+ numSeq+ ',', "label = \""+ numSeq+"("+ classe.upper() +")\"")
line = line.replace("fontsize = 8","fontsize = 15")
line = line.replace(colorSeq.strip(), "black")
elif numSeq in colored_seq_by_family:
if numSeq in colored_seq_by_family_tmp and colored_seq_by_family_tmp[numSeq] != []:
color = plma_seq3[numSeq].lower()
line = line.replace("fontsize = 8","fontsize = 15")
if color != colorUnclassified:
classe = diColor_family[color]
line = line.replace("label = "+numSeq+ ',',"label = \""+ numSeq+" ("+ classe.upper() +")\"")
else:
line = line.replace("label = "+numSeq+ ',',"label = \""+ numSeq+" (?)\"")
elif colored_seq_by_family_tmp[numSeq] == []:
color = colored_seq_by_family[numSeq][0]
line = line.replace("fontsize = 8","fontsize = 15")
classe = diColor_family[color]
line = line.replace("label = "+numSeq+ ',',"label = \"" + numSeq+" (?)\"")
line = line.replace(colorSeq.strip(), colored_seq_by_family[numSeq][0].lower())
outputFile.write(line+"];\n")
outputFile.write("}\n")
inputFile.close()
outputFile.close()
#================================================================================================================
#================================================================================================================
#Converting the produced dot file to PDF format
print commands.getoutput("python ./"+ pathToProg +"/plmadot2pdf.py -f ./"+fastaFileName+"_paloma/"+fastaFileName+"_"
+ param[counter] +"-col.dot")
print commands.getoutput("rm "+fastaFileName+"_paloma/"+fastaFileName+"_"+param[counter]+"-col.ps")
print commands.getoutput("mv ./"+fastaFileName+"_paloma/"+fastaFileName+"_"+param[counter]+"-col.pdf ./"
+fastaFileName+"_rClassif")
#main
if __name__ == '__main__':
main()
| 57.948718 | 156 | 0.437215 |
a92f1496b7763411e2b31b9ea1e93672271cdaca | 3,050 | css | CSS | css/color/color-purple.css | iogit/silkroutefinalci | 07316dda57f492ef753ed8cd5af75d85a2a1ade2 | [
"MIT"
] | null | null | null | css/color/color-purple.css | iogit/silkroutefinalci | 07316dda57f492ef753ed8cd5af75d85a2a1ade2 | [
"MIT"
] | null | null | null | css/color/color-purple.css | iogit/silkroutefinalci | 07316dda57f492ef753ed8cd5af75d85a2a1ade2 | [
"MIT"
] | null | null | null | a, .vc_main-color, .vc_menu .vc_primary-menu > ul > li > a > i,.vc_contact-top i, .vc_address i, .vc_testimonial .icon-caret-down, .nav-tabs > li.active > a, .nav-tabs > li.active > a:hover, .nav-tabs > li.active > a:focus, .vc_anim .vc_category, .vc_blog-list .blog-row .blog-right h3 a:hover, .vc_pricing-table .price, .vc_twitter-mini .vc_carousel-control a:hover, .vc_twitter-mini .bg .icon-twitter, .nav-tabs > li > a:hover, .nav-tabs > li > a:focus,.vc_google-result .gsc-results .gsc-cursor-box .gsc-cursor > .gsc-cursor-page {
color:#d5217e;
}
button, .vc_btn, .vc_bg-color, .vc_inverted, .vc_read-more, .navbar-inverse .btn-navbar, .vc_testimonial .testimonial-box,.input-append .add-on, .input-prepend .add-on, #vc_login-widget h3,.vc_single-portfolio .vc_carousel-control a,.vc_menu .vc_primary-menu > ul > li div.vc_menu-2-v, .vc_menu .vc_primary-menu > ul > li div.vc_menu-2-h, .vc_menu .vc_primary-menu > ul > li div.vc_mega-menu, .vc_anim .vc_hover a:hover, .tp-bannertimer, .vc_menu-search-wrapper, .vc_login-widget h3, .pagination > .active > a, .pagination > .active > span, .pagination > .active > a:hover, .pagination > .active > span:hover, .pagination > .active > a:focus, .pagination > .active > span:focus, .vc_social-share.vc_one-main-color a:hover, .header-1 .logo a, .header-2 .logo a, header .vc_btn-navbar .btn{
background-color:#d5217e;
}
header .vc_menu div.vc_primary-menu > ul, .vc_history h3,.accordion-heading, .nav-pills > .active > a:hover, .nav-pills > .active > a:focus, .nav-pills > li > a:hover, .vc_anim .vc_category, .vc_history h3, .vc_pager a.selected, .pagination > .active > a, .pagination > .active > span, .pagination > .active > a:hover, .pagination > .active > span:hover, .pagination > .active > a:focus, .pagination > .active > span:focus, .vc_social-share.vc_one-main-color a{
border-color:#d5217e;
}
.nav-tabs > li.active > a, .nav-tabs > li.active > a:hover, .nav-tabs > li.active > a:focus,.nav-pills > li.active > a, .nav-pills > li.active > a:hover, .nav-pills > li.active > a:focus,.nav-tabs > li > a:hover, .nav-tabs > li > a:focus, a.back-top, .vc_blog-list .entry-date,.vc_comments .comment-content, .vc_pricing-table h3, .vc_menu .vc_primary-menu > ul > li.active, .vc_menu .vc_primary-menu > ul > li.active:hover{
border-top-color:#d5217e;
}
header .vc_menu div.vc_primary-menu:before, .vc_table thead tr th{
border-bottom-color: #d5217e;
}
header .vc_primary-menu-wrapper{
background-color:#231d21;
}
.vc_menu-search .vc_menu-search-text{
border-top-color:#b41d6b;
border-right-color:#b41d6b;
}
.vc_sub-menu-bg{
border-top-color:#231d21;
}
header.mode-3 .vc_primary-menu-wrapper{
background :#F7F7F7;
box-shadow: 0 1px 0 #FFF inset;
border-bottom:1px solid #F6F6F6;
}
.vc_google-result .gsc-webResult.gsc-result:hover, .vc_google-result .gsc-results .gsc-imageResult:hover{
border-left-color: #d5217e;
}
.vc_google-result .gsc-results .gsc-cursor-box .gsc-cursor > .gsc-cursor-page.gsc-cursor-current-page{
background-color: #d5217e;
border-color: #d5217e;
}
| 80.263158 | 788 | 0.71377 |
7d2feac86e578379277416be2a214f8e414779f1 | 6,468 | ps1 | PowerShell | CommonScripts/SqlSsisFunctions.ps1 | DrJohnT/devops-your-dwh | 9c06912b7fd82f2a05f3a2416ea73c509deb0ad9 | [
"MIT"
] | 2 | 2019-08-19T19:27:34.000Z | 2019-12-06T15:44:19.000Z | CommonScripts/SqlSsisFunctions.ps1 | DrJohnT/devops-your-dwh | 9c06912b7fd82f2a05f3a2416ea73c509deb0ad9 | [
"MIT"
] | null | null | null | CommonScripts/SqlSsisFunctions.ps1 | DrJohnT/devops-your-dwh | 9c06912b7fd82f2a05f3a2416ea73c509deb0ad9 | [
"MIT"
] | 4 | 2018-12-31T06:51:16.000Z | 2019-08-19T19:29:18.000Z | #####################################################################################################
# Script written by © Dr. John Tunnicliffe, 2015-2018 https://github.com/DrJohnT/devops-your-dwh
# This PowerShell script is released under the MIT license http://www.opensource.org/licenses/MIT
#
# Functions to build and deploy SSIS projects
#####################################################################################################
function Deploy-SsisSolution ([string] $SolutionName = $(throw "Solution name required.") ) {
<#
.SYNOPSIS
Deploys the SSIS packages to the target environment using project deployment mode
#>
try
{
$SolutionFolderPath = Get-SolutionPath($SolutionName);
$SolutionFolderPath = split-path $SolutionFolderPath
$solutionNode = $deployConfig.DeploymentConfig.Solutions.Solution | where Name -EQ $SolutionName;
foreach ($project in $solutionNode.SSIS_Project) {
$projectPath = Join-Path $SolutionFolderPath $project.Project;
Deploy-SsisProject -ProjectPath $projectPath -Project $project.Project -Folder $project.Folder;
}
} catch {
logError -Message "Deploy-SsisSolution Failed to deploy solution $SolutionName Error: $_";
}
}
function Deploy-SsisProject ([string] $projectPath = $(throw "Project path required!"), [string] $project = $(throw "project name required!"), [string] $folder = $(throw "folder name required!") ) {
<#
.SYNOPSIS
Deploys the SSIS project to the target environment using project deployment mode
Must use isdeploymentwizard.exe to deploy SSIS projects
For isdeploymentwizard.exe command-line options see https://docs.microsoft.com/en-us/sql/integration-services/packages/deploy-integration-services-ssis-projects-and-packages
SSISDB Folder setup with thanks to https://www.hansmichiels.com/2016/11/04/how-to-automate-your-ssis-package-deployment-and-configuration-ssis-series/
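	.EXAMPLE
	# A minimal sketch; the path, project and folder names below are hypothetical.
	Deploy-SsisProject -ProjectPath "C:\src\MyEtl" -Project "MyEtl" -Folder "DWH"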
#>
try {
$ServerName = Get-SsisServerFromConfig;
$SQLCmdVaribles = Get-SqlCmdVariablesFromConfig -UseServerRoles $false;
$sqlFilePath = Join-Path $SsisDeploySQLScriptPath "CreateSsisDbFolder.sql";
assert(Test-Path($sqlFilePath)) "SQL script CreateSsisDbFolder.sql not exist!"
Run-SqlScriptAgainstServer -ServerName $ServerName -DatabaseName "SSISDB" -SqlFilePath $sqlFilePath -SQLCmdVaribles $SQLCmdVaribles;
$ispacPath = Join-Path $projectPath "bin\$configuration\$project.ispac";
assert(Test-Path($ispacPath)) "SSIS ISPAC does not exist in $ispacPath";
Write-Host "Deploying $project to $folder folder from ispac path $ispacPath" -ForegroundColor Yellow;
# As this is a windows EXE we need to wait for it to end before applying the scripts, so we pipe to Out-Null
exec { &"$SsisDeploymentWizard" /Silent /SourcePath:"$ispacPath" /DestinationServer:"$ServerName" /DestinationPath:"/SSISDB/$folder/$project" | Out-Null }
} catch {
logError -Message "Deploy-SsisProject Failed to deploy SSIS $project Error: $_";
}
}
function Deploy-SsisEnvironments ([string] $SolutionName = $(throw "Solution name required.") ) {
<#
.SYNOPSIS
Create an environment in SSISDB for the solution
#>
try {
$SolutionFolderPath = Get-SolutionPath($SolutionName);
$SolutionFolderPath = split-path $SolutionFolderPath
$solutionNode = $deployConfig.DeploymentConfig.Solutions.Solution | where Name -EQ $SolutionName;
foreach ($project in $solutionNode.SSIS_Project) {
Deploy-SsisEnvironment $project.Project $project.Folder;
}
} catch {
logError -Message "Deploy-SsisEnvironments failed. Error: $_";
}
}
function Deploy-SsisEnvironment ([string] $project = $(throw "project name required!"), [string] $folder = $(throw "folder name required!") ) {
<#
.SYNOPSIS
Create an environment in SSISDB for the project
SSISDB Environment setup with thanks to https://www.hansmichiels.com/2016/11/04/how-to-automate-your-ssis-package-deployment-and-configuration-ssis-series/
#>
try {
$ServerName = Get-SsisServerFromConfig;
$SQLCmdVaribles = Get-SqlCmdVariablesFromConfig -UseServerRoles $false;
$sqlFilePath = Join-Path $SsisDeploySQLScriptPath "CreateSsisDbEnvironment.sql";
assert(Test-Path($sqlFilePath)) "SQL script CreateSsisDbEnvironment.sql not exist!"
Run-SqlScriptAgainstServer -ServerName $ServerName -DatabaseName "SSISDB" -SqlFilePath $sqlFilePath -SQLCmdVaribles $SQLCmdVaribles;
$sqlFilePath = Join-Path $SsisDeploySQLScriptPath "LinkSsisDbEnvToProject.sql";
assert(Test-Path($sqlFilePath)) "SQL script LinkSsisDbEnvToProject.sql not exist!"
Run-SqlScriptAgainstServer -ServerName $ServerName -DatabaseName "SSISDB" -SqlFilePath $sqlFilePath -SQLCmdVaribles $SQLCmdVaribles;
} catch {
logError -Message "Deploy-SsisEnvironment failed. Error: $_";
}
}
function Drop-SsisFolder {
<#
.SYNOPSIS
Drops the SSIS folder
#>
try {
$ServerName = Get-SsisServerFromConfig;
$SQLCmdVaribles = Get-SqlCmdVariablesFromConfig -UseServerRoles $false;
$sqlFilePath = Join-Path $SsisDeploySQLScriptPath "Drop_SsisDb_Folder.sql";
assert(Test-Path($sqlFilePath)) "SQL script $sqlFilePath does not exist!"
Write-Host "Dropping SSIS folder";
Run-SqlScriptAgainstServer -ServerName $ServerName -DatabaseName "SSISDB" -SqlFilePath $sqlFilePath -SQLCmdVaribles $SQLCmdVaribles;
} catch {
logError -Message "Drop-SsisFolder failed to drop folder $folder in SSISDB Error: $_";
}
}
function Invoke-SsisPackage ([string] $SsisPackageName = $(throw "SSIS Package name required!")) {
<#
.SYNOPSIS
Executes an SSIS package in SSISDB
#>
try {
$ServerName = Get-SsisServerFromConfig;
$SQLCmdVaribles = Get-SqlCmdVariablesFromConfig -UseServerRoles $false;
$SQLCmdVaribles += "SsisPackageName=$SsisPackageName";
$sqlFilePath = Join-Path $SsisDeploySQLScriptPath "ExecuteSsisPackage.sql";
assert(Test-Path($sqlFilePath)) "SQL script ExecuteSsisPackage.sql not exist!"
Write-Host "Running SSIS package $SsisPackageName";
Run-SqlScriptAgainstServer -ServerName $ServerName -DatabaseName "SSISDB" -SqlFilePath $sqlFilePath -SQLCmdVaribles $SQLCmdVaribles;
} catch {
logError -Message "Invoke-SsisPackage failed. Error: $_";
}
}
| 45.230769 | 203 | 0.69697 |
cc33636f94aaa0c120d5dd6d06d2b574222f3951 | 56,464 | lua | Lua | pandoc-zotxt.lua | odkr/pandoc-zotxt.lua | 068fe8a63c846b946b711062cd6161767b05f192 | [
"MIT"
] | 33 | 2018-07-24T22:03:01.000Z | 2022-03-23T19:33:12.000Z | pandoc-zotxt.lua | odkr/pandoc-zotxt.lua | 068fe8a63c846b946b711062cd6161767b05f192 | [
"MIT"
] | 9 | 2019-08-01T11:12:11.000Z | 2022-03-29T10:07:20.000Z | pandoc-zotxt.lua | odkr/pandoc-zotxt.lua | 068fe8a63c846b946b711062cd6161767b05f192 | [
"MIT"
] | 2 | 2020-12-21T04:48:12.000Z | 2021-04-12T15:18:48.000Z | ---
-- SYNOPSIS
-- --------
--
-- **pandoc** **-L** *pandoc-zotxt.lua* **-C**
--
--
-- DESCRIPTION
-- -----------
--
-- **pandoc-zotxt.lua** looks up sources of citations in Zotero and
-- adds them either to a document's "references" metadata field or
-- to a bibliography file, where Pandoc can pick them up.
--
-- You cite your sources using "easy citekeys" (provided by *zotxt*) or
-- "Better BibTeX Citation Keys" (provided by Better BibTeX for Zotero).
-- You then tell **pandoc** to filter your document through
-- **pandoc-zotxt.lua** before processing citations (Zotero must be
-- running). That's all there is to it.
--
-- **pandoc-zotxt.lua** only looks up sources that are defined neither
-- in the "references" metadata field nor in any bibliography file.
--
--
-- BIBLIOGRAPHY FILES
-- ------------------
--
-- If you set the "zotero-bibliography" metadata field to a filename,
-- then **pandoc-zotxt.lua** adds sources to that file, rather than to
-- the "references" metadata field. It also adds the path of that file to
-- the document's "bibliography" metadata field, so that Pandoc picks up
-- the bibliographic data of those sources (you can safely set
-- "zotero-bibliography" and "bibliography" at the same time).
-- This speeds up subsequent processing of the same document, because
-- **pandoc-zotxt.lua** will only fetch those sources from Zotero that
-- are not yet in that file.
--
-- The bibliography is stored as a CSL JSON file, so the bibliography
-- file's name must end with ".json".
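--
-- For instance, a minimal sketch of such a metadata block (the filename
-- below is only an example):
--
--      ---
--      zotero-bibliography: bibliography.json
--      ...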
--
-- **pandoc-zotxt.lua** interprets relative filenames as relative to the
-- directory of the first input file that you pass to **pandoc** or, if you
-- do not pass any input file, as relative to the current working directory.
--
-- **pandoc-zotxt.lua** only ever adds sources to its bibliography file.
-- It does *not* update or delete them. If you want to update the sources
-- in your bibliography file, delete it. **pandoc-zotxt.lua** will then
-- regenerate it from scratch.
--
--
-- EXAMPLE
-- -------
--
-- pandoc -L pandoc-zotxt.lua -C <<EOF
-- See @doe2020Title for details.
-- EOF
--
-- This will look up "doe2020Title" in Zotero.
--
--
-- KNOWN ISSUES
-- ------------
--
-- Zotero v5.0.71 and v5.0.72 fail to handle HTTP requests from user agents
-- that do not set the "User Agent" HTTP header. And **pandoc** does not.
-- As a consequence, **pandoc-zotxt.lua** cannot retrieve data from these
-- versions of Zotero unless you tell **pandoc** to set that header.
--
--
-- CAVEATS
-- -------
--
-- **pandoc-zotxt.lua** is Unicode-agnostic.
--
--
-- SEE ALSO
-- --------
--
-- * [zotxt](https://github.com/egh/zotxt)
-- * [Better BibTeX](https://retorque.re/zotero-better-bibtex/)
--
-- pandoc(1)
--
-- @script pandoc-zotxt.lua
-- @release 1.1.0b
-- @author Odin Kroeger
-- @copyright 2018, 2019, 2020, 2021 Odin Kroeger
-- @license MIT
-- INITIALISATION
-- ==============
--
-- luacheck: allow defined top
-- Built-in functions.
local assert = assert
local error = error
local getmetatable = getmetatable
local next = next
local pairs = pairs
local pcall = pcall
local rawget = rawget
local require = require
local select = select
local setmetatable = setmetatable
local tonumber = tonumber
local tostring = tostring
local type = type
-- Modules.
local io = io
local math = math
local os = os
local package = package
local string = string
local table = table
-- Pandoc.
-- luacheck: push ignore
local pandoc = pandoc
if not pandoc.utils then pandoc.utils = require 'pandoc.utils' end
local PANDOC_STATE = PANDOC_STATE
local PANDOC_SCRIPT_FILE = PANDOC_SCRIPT_FILE
local PANDOC_VERSION = PANDOC_VERSION
-- luacheck: pop
-- luacheck: ignore _ENV
local M = {}
local _ENV = M
-- Shorthands.
local concat = table.concat
local unpack = table.unpack
local stringify = pandoc.utils.stringify
local List = pandoc.List
local MetaInlines = pandoc.MetaInlines
local MetaList = pandoc.MetaList
local Str = pandoc.Str
local Span = pandoc.Span
-- Metadata
-- --------
--- The name of this script.
-- @within Metadata
NAME = 'pandoc-zotxt.lua'
--- The version of this script.
-- @within Metadata
VERSION = '1.1.0b'
-- Operating system
-- ----------------
--- The path segment separator of the OS.
-- @within File I/O
PATH_SEP = package.config:sub(1, 1)
--- The end of line sequence of the OS.
-- @within File I/O
EOL = '\n'
if PATH_SEP == '\\' then EOL = '\r\n' end
-- Modules
-- -------
do
-- Expression to split a path into a directory and a filename part.
local split_e = '(.-' .. PATH_SEP .. '?)([^' .. PATH_SEP .. ']-)$'
-- Expressions that sanitise directory paths.
local san_es = {
-- Replace '/./' with '/'.
{PATH_SEP .. '%.' .. PATH_SEP, PATH_SEP},
-- Replace a sequence of '/'s with a single '/'.
{PATH_SEP .. '+', PATH_SEP},
-- Remove './' at the beginning of paths.
{'^%.' .. PATH_SEP, ''},
-- Remove trailing '/'s, but not for the root node.
{'(.)' .. PATH_SEP .. '$', '%1'}
}
--- Split a file's path into a directory and a filename part.
--
-- @string path The file's path.
-- @treturn[1] string The directory the file is in.
-- @treturn[1] string The file's name.
-- @treturn[2] nil `nil` if `path` is the empty string ('').
-- @treturn[2] string An error message.
-- @raise An error if the path is the empty string.
-- @within File I/O
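    -- @usage
    --      -- A sketch of typical results, assuming a POSIX path separator:
    --      path_split('foo/bar.txt') --> 'foo', 'bar.txt'
    --      path_split('bar.txt')     --> '.',   'bar.txt'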
function path_split (path)
if path == '' then return nil, 'Path is the empty string ("").' end
local dir, fname = path:match(split_e)
for i = 1, #san_es do dir = dir:gsub(unpack(san_es[i])) end
if dir == '' then dir = '.'
elseif fname == '' then fname = '.' end
assert(dir ~= '')
assert(fname ~= '')
return dir, fname
end
--- Join multiple path segments.
--
-- @string ... Path segments.
-- @treturn string The complete path.
-- @raise An error if no path segments are given or if
-- a path segment is the empty string ('').
-- @within File I/O
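    -- @usage
    --      -- A sketch of a typical call, assuming a POSIX path separator:
    --      path_join('foo', '.', 'bar') --> 'foo/bar'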
function path_join (...)
local segs = {...}
local n = #segs
assert(n > 0, 'No path segments given.')
for i = 1, n do
assert(segs[i] ~= '', 'Path segment is the empty string ("").')
end
local path = concat(segs, PATH_SEP)
for i = 1, #san_es do path = path:gsub(unpack(san_es[i])) end
return path
end
end
do
local script_dir, script_name = path_split(PANDOC_SCRIPT_FILE)
--- The directory the script is in.
-- @within Metadata
SCPT_DIR = script_dir
--- The filename of the script.
-- @within Metadata
SCPT_NAME = script_name
end
do
local repo = NAME .. '-' .. VERSION
local vers = {'5.4', '5.3'}
for i = 1, #vers do
local sub_dir = path_join('share', 'lua', vers[i], '?.lua')
package.path = concat({package.path,
path_join(SCPT_DIR, sub_dir),
path_join(SCPT_DIR, repo, sub_dir)
}, ';')
end
end
local text = require 'text'
local json = require 'lunajson'
-- FUNCTIONS
-- =========
-- Warnings
-- --------
--- A prefix for every message printed with `printf`.
local PRINTF_PREFIX = SCPT_NAME .. ': '
--- Print a message to STDERR.
--
-- Prefixes the message with `PRINTF_PREFIX` and appends `EOL`.
--
-- @string[opt] msg The message.
-- @param ... Arguments to that message (think `string.format`).
-- @within Warnings
function printf (msg, ...)
if not msg then msg = '' end
io.stderr:write(PRINTF_PREFIX, msg:format(...), EOL)
end
--- Print a warning to STDERR.
--
-- Only prints the message if `PANDOC_STATE.verbosity` is *not* 'ERROR'.
-- Otherwise the same as `printf`.
--
-- @param ... Takes the same arguments as `printf`.
-- @within Warnings
function warnf (...)
if PANDOC_STATE.verbosity ~= 'ERROR' then printf(...) end
end
-- Tables
-- ------
--- Recursively apply a function to every value of a tree.
--
-- The function is applied to *every* node of the data tree.
-- If a node is a `table`, the function is applied *after* recursion.
--
-- @func func A function that takes a value and returns a new one.
-- Receives the value's key as second argument, if applicable.
-- @param data A data tree.
-- @return `data` with `func` applied.
-- @raise An error if the data is nested too deeply.
-- @within Table manipulation
-- @fixme Mostly untested.
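-- @usage
--      -- A sketch: uppercase every string in a tree, pass tables through.
--      rmap(function (v)
--          if type(v) == 'string' then return v:upper() end
--          return v
--      end, {name = 'doe', tags = {'a', 'b'}})
--      --> {name = 'DOE', tags = {'A', 'B'}}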
function rmap (func, data, _rd)
if type(data) ~= 'table' then return func(data) end
if not _rd then _rd = 0
else _rd = _rd + 1
end
assert(_rd < 512, 'Too much recursion.')
local ret = {}
local k = next(data, nil)
while k ~= nil do
local v = data[k]
if type(v) == 'table' then v = rmap(func, v, _rd) end
ret[k] = func(v, k)
k = next(data, k)
end
return ret
end
--- Return the keys and the length of a table.
--
-- @tab tab The table.
-- @treturn tab The keys of `tab`.
-- @treturn int `tab`'s length.
-- @within Table manipulation
function keys (tab)
local ks = {}
local n = 0
local k = next(tab, nil)
while k ~= nil do
n = n + 1
ks[n] = k
k = next(tab, k)
end
return ks, n
end
do
local lower = text.lower
--- Recursively convert table keys to lowercase.
--
-- @tab tab The table.
-- @return A copy of `tab` with keys in lowercase.
-- @raise An error if the data is nested too deeply.
-- @within Table manipulation
function lower_keys (tab, _rd)
if not _rd then _rd = 0 end
assert(_rd < 512, 'Too much recursion.')
local ret = {}
for k, v in pairs(tab) do
if type(k) == 'string' then k = lower(k) end
if type(v) == 'table' then v = lower_keys(v, _rd + 1) end
ret[k] = v
end
return ret
end
end
--- Iterate over the keys of a table in a given order.
--
-- @tab tab A table.
-- @func[opt] func A sorting function.
-- If no function is given, sorts by number.
-- @treturn func A *stateful* iterator over `tab`.
-- @within Table manipulation
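-- @usage
--      -- A sketch: keys come back in sorted (here: lexical) order.
--      for k, v in sorted_pairs{b = 2, a = 1} do print(k, v) end
--      -- prints 'a 1', then 'b 2'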
function sorted_pairs (tab, func)
local ks = keys(tab)
table.sort(ks, func)
local n = 0
local function iter ()
n = n + 1
local k = ks[n]
if k == nil then return end
local v = tab[k]
if v == nil then return end
return k, v
end
return iter, tab, nil
end
-- File I/O
-- --------
--- Check whether a path is absolute.
--
-- @string path A path.
-- @treturn bool `true` if the path is absolute, `false` otherwise.
-- @raise An error if the path is the empty string ('').
-- @within File I/O
function path_is_abs (path)
assert(path ~= '', 'Path is the empty string ("").')
if PATH_SEP == '\\' and path:match '^.:\\' then return true end
return path:match('^' .. PATH_SEP) ~= nil
end
--- Get a directory to use as working directory.
--
-- @treturn string The directory of the first input file
-- or '.' if none was given.
-- @within File I/O
function wd ()
local fname = PANDOC_STATE.input_files[1]
if not fname then return '.' end
assert(type(fname) == 'string')
assert(fname ~= '')
local wd = path_split(fname)
return wd
end
--- Check whether a filename refers to a file.
--
-- @string fname The filename.
-- @treturn bool Whether the file exists.
-- @within File I/O
function file_exists (fname)
assert(fname ~= '', 'Filename is the empty string ("").')
local file, _, errno = io.open(fname)
if errno == 2 then return false end
if file ~= nil then file:close() end
return true
end
do
local rsrc_path = PANDOC_STATE.resource_path
--- Locate a file in Pandoc's resource path.
--
-- Absolute filenames are returned as they are.
--
-- @string fname A filename.
-- @treturn[1] string A filename.
-- @treturn[2] nil `nil` if the file could not be found.
-- @treturn[2] string An error message.
-- @within File I/O
function file_locate (fname)
if not rsrc_path or file_exists(fname) then return fname end
for i = 1, #rsrc_path do
local f = path_join(rsrc_path[i], fname)
if file_exists(f) then return f end
end
return nil, fname .. ': Not found in resource path.'
end
end
--- Read a file.
--
-- @string fname The name of the file.
-- @treturn[1] string The content of the file.
-- @treturn[2] nil `nil` if an error occurred.
-- @treturn[2] string An error message.
-- @treturn[2] int An error number.
-- @within File I/O
function file_read (fname)
local str, err, errno, file, ok
file, err, errno = io.open(fname, 'r')
if not file then return nil, err, errno end
str, err, errno = file:read('a')
if not str then return nil, err, errno end
ok, err, errno = file:close()
if not ok then return nil, err, errno end
return str
end
--- Write data to a file.
--
-- The data is first written to a temporary file,
-- that file is then renamed to the given name.
-- If the file exists, it is overwritten.
-- Tries to print a warning to STDERR if that happens.
--
-- @string fname The name of the file.
-- @string ... The data.
-- @treturn[1] bool `true` if the data was written to the given file.
-- @treturn[2] nil `nil` if an error occurred.
-- @treturn[2] string An error message.
-- @treturn[2] int An error number.
-- @within File I/O
function file_write (fname, ...)
assert(fname ~= '', 'Filename is the empty string.')
local tmp, ok, err, errno
tmp, err, errno = tmp_file(path_split(fname), nil)
if not tmp then return nil, err, errno end
ok, err, errno = tmp.file:write(...)
if not ok then return nil, err, errno end
ok, err, errno = tmp.file:flush()
if not ok then return nil, err, errno end
ok, err, errno = tmp.file:close()
if not ok then return nil, err, errno end
if file_exists(fname) then warnf('Updating %s.', fname) end
ok, err, errno = os.rename(tmp.fname, fname)
if not ok then return nil, err, errno end
tmp.fname = nil
return true
end
do
local alnum = {}
do
-- These are the ASCII/UTF-8 ranges for alphanumeric characters.
local ranges = {
{48, 57}, -- 0-9.
{65, 90}, -- A-Z.
{97, 122} -- a-z.
}
-- Populate alnum.
        local n = 0
for i = 1, #ranges do
local first, last = unpack(ranges[i])
for j = first, last do
n = n + 1
alnum[n] = string.char(j)
end
end
alnum.n = n
end
math.randomseed(os.time())
--- Generate a name for a temporary file.
--
-- Tries to make sure that there is no file with that name already.
--
-- @string[opt] dir A directory to prefix the filename with.
-- Cannot be the empty string ('').
-- @string[optchain='tmp_XXXXXX'] templ A template for the filename.
-- 'X's are replaced with random alphanumeric characters.
-- Must contain at least six 'X's.
-- @treturn[1] string A filename.
-- @treturn[2] nil `nil` if the generated filename is in use.
-- @treturn[2] string An error message.
-- @raise An error if the template or the directory is
-- not a string or the empty string ('').
-- @within File I/O
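    -- @usage
    --      -- A sketch; the alphanumeric suffix is random, so the actual name
    --      -- will differ (assuming a POSIX path separator):
    --      tmp_fname('/tmp', 'zotxt_XXXXXX') --> e.g. '/tmp/zotxt_a1B2c3'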
function tmp_fname (dir, templ)
if templ ~= nil then
assert(type(templ) == 'string')
assert(templ ~= '', 'Template is the empty string.')
else
templ = 'tmp_XXXXXX'
end
if dir ~= nil then
assert(type(dir) == 'string')
assert(dir ~= '', 'Directory is the empty string.')
templ = path_join(dir, templ)
end
local len = #templ
local nxs = 0
for i = 1, len do
if templ:sub(i, i) == 'X' then nxs = nxs + 1 end
end
assert(nxs >= 6, 'Template string must contain at least six "X"s.')
for _ = 1, 1024 do
local fname = ''
for i = 1, len do
local c = templ:sub(i, i)
if c == 'X' then c = alnum[math.random(1, alnum.n)] end
fname = fname .. c
end
if not file_exists(fname) then return fname end
end
return nil, 'Could not find an unused filename.'
end
end
do
local mt = {}
-- Closes and removes the file when its handle is garbage-collected.
--
-- If errors occur, prints them to STDERR.
function mt.__gc (self)
local file = self.file
if not file then return end
local fname = self.fname
if fname then
local ok, err = os.remove(fname)
if not ok then printf(err) end
end
if io.type(file) == 'file' then
local ok, err = file:close()
if not ok then printf(err) end
end
end
--- Create a temporary file.
--
-- The temporary file is removed when its file handle/filename pair is
-- garbage-collected. If the file should *not* be removed, set `fname`
-- to `nil`. Prints errors that occur during garbage collection to STDERR.
-- If you call `os.exit` tell it to close the Lua state, so that Lua runs
-- the garbage collector before exiting the script.
--
-- Tries not to overwrite existing files.
--
-- @param ... Takes the same arguments as `tmp_fname`.
--
-- @treturn[1] {file=FILE*,fname=string} A file handle/filename pair.
-- @treturn[2] nil `nil` if an error occurs.
-- @treturn[2] string An error message.
-- @treturn[2] ?int An error number if the error is an I/O error.
-- @raise See `tmp_fname`.
-- @within File I/O
--
-- @usage
-- do
-- local tmp, ok, err
-- -- Creates the file.
-- tmp, err = tmp_file()
-- if tmp then
-- ok, err = tmp.file:write(data)
-- if ok then ok, err = tmp.file:close() end
-- if ok then ok, err = os.rename(tmp.fname, fname) end
-- if ok then
-- -- The temporary file was renamed,
-- -- so disable deletion (which would fail).
-- tmp.fname = nil
-- else
-- print(err)
-- end
-- else
-- print(err)
-- end
-- end
-- -- If writing and renaming the temporary file has not succeeded,
-- -- then the garbage collector will delete the file after
-- -- (not "at") this point.
function tmp_file (...)
local tmp, err, errno
tmp = setmetatable({}, mt)
tmp.fname, err, errno = tmp_fname(...)
if not tmp.fname then return nil, err, errno end
tmp.file, err, errno = io.open(tmp.fname, 'w+')
if not tmp.file then return nil, err, errno end
return tmp
end
end
-- Networking
-- ----------
--- Retrieve data from a URL via an HTTP GET request.
--
-- @string url The URL.
-- @treturn string The MIME type of the HTTP content.
-- @treturn string The HTTP content itself.
-- @raise An error if no data can be retrieved. This error can only be
-- caught in Pandoc v2.11 or later.
-- @within Networking
function url_read (url)
return pandoc.mediabag.fetch(url, '.')
end
-- Converters
-- ----------
--- The list of CSL fields that can be formatted.
--
-- This list is a guess!
--
-- [Appendix IV](https://docs.citationstyles.org/en/stable/specification.html#appendix-iv-variables)
-- of the CSL specification lists all field names.
--
-- @see rconv_html_to_md
-- @within Bibliography files
-- @todo Lookup in Citeproc source code.
CSL_KEYS_FORMATTABLE = {
'abstract', -- The abstract.
'collection-title', -- E.g., a series.
'collection-title-short', -- A short version of the title.
'container-title', -- Publication the item was published in.
'container-title-short', -- A short version of that title.
'original-publisher', -- Original publisher.
'original-publisher-place', -- Place the item was originally published in.
'original-title', -- Original title.
'publisher', -- Publisher.
'publisher-place', -- The city/cities the item was published in.
'reviewed-title', -- Title reviewed in the item.
'title', -- The title.
'title-short', -- A short version of the title.
'short-title', -- Ditto.
}
do
local function esc_bold_italics (char, tail)
return char:gsub('(.)', '\\%1') .. tail
end
local function esc_sup_sub (head, body, tail)
return head:gsub('(.)', '\\%1') .. body .. tail:gsub('(.)', '\\%1')
end
local function esc_brackets (char, tail)
return '\\[' .. char:sub(2, -2) .. '\\]' .. tail
end
-- Pairs of expressions and replacements to escape Markdown.
local esc_es = {
-- Backslashes.
{'(\\+)', '\\%1'},
-- Bold and italics.
-- This escapes liberally, but it is the only way to cover edge cases.
{'(%*+)([^%s%*])', esc_bold_italics},
{'(_+)([^%s_])', esc_bold_italics},
-- Superscript and subscript.
{'(%^+)([^%^%s]*)(%^+)', esc_sup_sub},
{'(~+)([^~%s]+)(~+)', esc_sup_sub},
-- Brackets (spans and links).
{'(%b[])([%({])', esc_brackets}
}
--- Escape Markdown.
--
-- Only escapes Markdown that Pandoc recognises in bibliographic data.
--
-- See <https://pandoc.org/MANUAL.html#specifying-bibliographic-data>.
--
-- @string str A string.
-- @treturn string `str` with Markdown escaped.
-- @within Converters
function esc_md (str)
for i = 1, #esc_es do str = str:gsub(unpack(esc_es[i])) end
return str
end
end
do
local esc = {}
-- Escape Markdown in a string element.
--
-- Works like `esc_md` but for Pandoc string elements.
--
-- @tparam pandoc.Str str A string element.
-- @treturn pandoc.Str A string with Markdown markup escaped.
function esc.Str (str)
str.text = esc_md(str.text)
return str
end
local md = {}
-- Make a function that converts an element to Markdown.
--
-- @string char The Markdown markup character for that element.
-- @treturn func The conversion function.
local function mk_elem_conv_f (char)
return function (elem)
local str = stringify(pandoc.walk_inline(elem, md))
return Str(char .. str .. char)
end
end
md.Emph = mk_elem_conv_f '*'
md.Strong = mk_elem_conv_f '**'
md.Subscript = mk_elem_conv_f '~'
md.Superscript = mk_elem_conv_f '^'
-- Convert <span> elements to Markdown text.
--
    -- @tparam pandoc.Span span A <span> element.
-- @treturn pandoc.Str The element as Markdown.
function md.Span (span)
local str = stringify(pandoc.walk_inline(span, md))
local attrs = ''
if span.identifier then
local id = stringify(span.identifier)
if id ~= '' then attrs = '#' .. id end
end
if span.classes then
for i = 1, #span.classes do
if attrs ~= '' then attrs = attrs .. ' ' end
attrs = attrs .. '.' .. span.classes[i]
end
end
if span.attributes then
for k, v in pairs(span.attributes) do
if attrs ~= '' then attrs = attrs .. ' ' end
attrs = attrs .. k .. '="' .. v .. '"'
end
end
if attrs ~= '' then str = '[' .. str .. ']{' .. attrs .. '}' end
return Str(str)
end
-- Convert SmallCaps elements to Markdown text.
--
    -- @tparam pandoc.SmallCaps sc A SmallCaps element.
-- @treturn pandoc.Str The element as Markdown.
function md.SmallCaps (sc)
local span = Span(sc.content)
span.attributes.style = 'font-variant: small-caps'
return md.Span(span)
end
--- Convert a Pandoc element to Markdown text.
--
-- Only recognises elements that are permitted in bibliographic data.
--
-- See <https://pandoc.org/MANUAL.html#specifying-bibliographic-data>.
--
-- @tparam pandoc.AstElement elem A Pandoc AST element.
-- @treturn string Markdown text.
-- @within Converters
-- @fixme Untested.
function markdownify (elem)
return stringify(walk(walk(elem, esc), md))
end
end
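-- An illustrative, commented-out sketch of `markdownify`; the inline
-- elements are made up:
--
-- local inlines = pandoc.Emph{pandoc.Str 'var'}
-- print(markdownify(inlines)) -- should print something like '*var*'.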
do
local function spaces (n)
return string.rep(' ', n)
end
--- Generate a YAML representation of some data.
--
-- Uses `EOL` to end lines.
--
-- @param data The data.
-- @int[opt=4] ind How many spaces to indent blocks.
-- @func[optchain] sort_f A function to sort keys of mappings.
-- Defaults to sorting them lexically.
-- @treturn[1] string A YAML string.
-- @treturn[2] nil `nil` if the data cannot be represented in YAML.
-- @treturn[2] string An error message.
-- @raise An error if the data is nested too deeply.
-- @within Converters
-- @fixme Doesn't normalise line breaks within strings.
-- @fixme May or may not be to spec.
function yamlify (data, ind, sort_f, _col, _rd)
if not _rd then _rd = 0 end
assert(_rd < 1024, 'Too much recursion.')
if not ind then ind = 4 end
local t = type(data)
if t == 'number' then
return tostring(data)
elseif t == 'string' then
if tonumber(data) then return data end
return '"' .. data:gsub('(\\)+', '%1\\'):gsub('"', '\\"') .. '"'
elseif t == 'table' then
if not _col then _col = 0 end
local ret = ''
local n = #data
local nkeys = select(2, keys(data))
local sp = spaces(_col)
if n == nkeys then
local col = _col + 2
for i = 1, n do
if i > 1 then ret = ret .. sp end
ret = ret .. '- '
.. yamlify(data[i], ind, sort_f, col, _rd + 1)
if i ~= n then ret = ret .. EOL end
end
else
local i = 0
for k, v in sorted_pairs(data, sort_f) do
i = i + 1
k = tostring(k)
if i > 1 then ret = ret .. sp end
ret = ret .. k .. ':'
local col = _col + ind
if type(v) == 'table' then ret = ret .. EOL .. spaces(col)
else ret = ret .. ' '
end
ret = ret .. yamlify(v, ind, sort_f, col, _rd + 1)
if i ~= nkeys then ret = ret .. EOL end
end
end
return ret
else
return nil, t .. ': Cannot be expressed in YAML.'
end
end
end
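-- An illustrative, commented-out sketch of `yamlify`; the item is made up:
--
-- local yaml, err = yamlify{references = {{id = 'doe2020', type = 'book'}}}
-- if yaml then io.write(yaml, EOL) else print(err) end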
do
-- Replace '<sc>...</sc>' pseudo-HTML with <span> tags.
--
-- Zotero supports using '<sc>...</sc>' to set text in small caps.
-- Pandoc throws those tags out.
--
-- @string str A string.
-- @treturn string `str` with `<sc>...</sc>` replaced with <span> tags.
local function conv_sc_to_span (str)
local tmp, n = str:gsub('<sc>', '<span style="font-variant: small-caps">')
if n == 0 then return str end
local ret, m = tmp:gsub('</sc>', '</span>')
if m == 0 then return str end
return ret
end
--- Convert pseudo-HTML to Markdown.
--
-- Only supports the HTML tags that Zotero *and* Pandoc support.
--
-- See <https://pandoc.org/MANUAL.html#specifying-bibliographic-data>
-- and <https://docs.citationstyles.org/en/1.0/release-notes.html#rich-text-markup-within-fields>.
--
-- @string html Text that contains pseudo-HTML tags.
-- @treturn string Text formatted in Markdown.
-- @within Converters
function conv_html_to_md (html)
local sc_replaced = conv_sc_to_span(html)
local doc = pandoc.read(sc_replaced, 'html')
return markdownify(doc)
end
end
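-- An illustrative, commented-out sketch of `conv_html_to_md`; the input
-- string is made up:
--
-- local md = conv_html_to_md 'A study of <i>kappa</i> and <sc>small caps</sc>'
-- -- `md` should contain '*kappa*' and a small-caps span.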
do
local keys_formattable = {}
for i = 1, #CSL_KEYS_FORMATTABLE do
keys_formattable[CSL_KEYS_FORMATTABLE[i]] = true
end
local function conv (val, key)
if not keys_formattable[key] or type(val) ~= 'string' then
return val
end
return conv_html_to_md(val)
end
--- Recursively convert pseudo-HTML to Markdown.
--
-- Only changes fields listed in `CSL_KEYS_FORMATTABLE`.
--
-- @tab item A CSL item.
-- @treturn tab The CSL item, with pseudo-HTML replaced with Markdown.
-- @see conv_html_to_md
-- @within Converters
-- @fixme Untested
function rconv_html_to_md (item)
return rmap(conv, item)
end
end
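-- An illustrative, commented-out sketch of `rconv_html_to_md`; the item is
-- made up. Only fields listed in `CSL_KEYS_FORMATTABLE` are converted:
--
-- local item = {id = 'doe2020', title = 'On <i>words</i>', note = '<i>untouched</i>'}
-- item = rconv_html_to_md(item)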
do
    local function conv (data)
if type(data) ~= 'number' then return data end
return tostring(math.floor(data))
end
--- Recursively convert numbers to strings.
--
-- Also converts floating point numbers to integers. This is needed
-- because all numbers are floating point numbers in JSON, but some
-- versions of Pandoc expect integers.
--
-- @tab data The data.
-- @return A copy of `data` with numbers converted to strings.
-- @raise An error if the data is nested too deeply.
-- @within Converters
function rconv_nums_to_strs (data)
return rmap(conv, data)
end
end
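-- An illustrative, commented-out sketch of `rconv_nums_to_strs`; the data
-- is made up:
--
-- local csl = rconv_nums_to_strs{issued = {['date-parts'] = {{2020, 3}}}}
-- -- csl.issued['date-parts'][1][1] is now the string '2020'.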
-- zotxt
-- -----
--- The URL to lookup citation data.
--
-- See <https://github.com/egh/zotxt> for details.
--
-- @within zotxt
ZOTXT_BASE_URL = 'http://localhost:23119/zotxt/items?'
--- Types of citation keys.
--
-- See <https://github.com/egh/zotxt> for details.
--
-- @table ZOTXT_KEYTYPES
-- @within zotxt
ZOTXT_KEYTYPES = {
'key', -- Zotero item ID
'betterbibtexkey', -- Better BibTeX citation key
'easykey', -- zotxt easy citekey
}
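-- For illustration only: with the defaults above, looking up the (made-up)
-- easy citekey 'doe:2020word' assembles a URL along the lines of
-- <http://localhost:23119/zotxt/items?easykey=doe:2020word>, i.e.:
--
-- local url = concat{ZOTXT_BASE_URL, 'easykey', '=', 'doe:2020word'}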
do
local read = pandoc.read
local decode = json.decode
local base_url = ZOTXT_BASE_URL
local key_ts = ZOTXT_KEYTYPES
-- Retrieve a CSL item (low-level).
--
-- Takes an item ID and a parsing function, queries *zotxt* for that ID,
-- passes whatever *zotxt* returns to the parsing function, and then
-- returns whatever the parsing function returns. The parsing function
-- should raise an error if its argument cannot be interpreted as
-- bibliographic data.
--
-- Tries every citation key type defined in `ZOTXT_KEYTYPES` until the
-- query is successful or no more citation key types are left.
--
-- @func parse_f A function that takes a CSL JSON string,
-- returns a CSL item, and raises an error if, and only if,
-- it cannot interpret the JSON string as a CSL item.
-- @string id An item ID, e.g., 'name:2019word', 'name2019TwoWords'.
-- @treturn[1] table A CSL item.
-- @treturn[2] nil `nil` if an error occurred.
-- @treturn[2] string An error message.
-- @raise An error if no data can be read from *zotxt*.
-- This error can only be caught in Pandoc v2.11 or later.
-- @within zotxt
local function get (parse_f, id)
for i = 1, #key_ts do
-- zotxt supports searching for multiple citation keys at once,
-- but if a single one cannot be found, it replies with a cryptic
-- error message (for easy citekeys) or an empty response
-- (for Better BibTeX citation keys).
local query_url = concat{base_url, key_ts[i], '=', id}
local _, data = url_read(query_url)
if data then
local ok, item = pcall(parse_f, data)
if ok then
if i ~= 1 then
key_ts[1], key_ts[i] = key_ts[i], key_ts[1]
end
return item
end
end
end
return nil, id .. ': Not found.'
end
--- Convert a CSL JSON string to a Lua data structure.
--
-- @string str A CSL JSON string.
-- @return A Lua data structure.
local function conv_json_to_lua (str)
assert(str ~= '')
return rconv_nums_to_strs(decode(str)[1])
end
--- Retrieve a CSL item (for use in bibliography files).
--
-- Returns bibliographic data as a Lua table. The retrieved item can be
-- passed to `biblio_write`; it should *not* be used in the `references`
-- metadata field (unless you are using Pandoc prior to v2.11).
--
-- @string id An item ID, e.g., 'name:2019word', 'name2019TwoWords'.
-- @treturn[1] table A CSL item.
-- @treturn[2] nil `nil` if an error occurred.
-- @treturn[2] string An error message.
-- @raise See `zotxt_get_item`.
-- @within zotxt
function zotxt_get_item_csl (id)
assert(id ~= '', 'ID is the empty string ("").')
local ref, err = get(conv_json_to_lua, id)
if not ref then return nil, err end
ref.id = id
return ref
end
--- Convert a CSL JSON string to Pandoc metadata.
--
-- @string str A CSL JSON string.
-- @treturn pandoc.MetaMap Pandoc metadata.
local function conv_json_to_meta (str)
assert(str ~= '')
return read(str, 'csljson').meta.references[1]
end
--- Retrieve a CSL item (for use in the `references` metadata field).
--
-- Returns bibliographic data as Pandoc metadata. That retrieved item
-- can be used in the `references` metadata field; it should *not* be
-- passed to `biblio_write`.
--
-- @string id An item ID, e.g., 'name:2019word', 'name2019TwoWords'.
-- @treturn[1] pandoc.MetaMap A CSL item.
-- @treturn[2] nil `nil` if an error occurred.
-- @treturn[2] string An error message.
-- @raise See `zotxt_get_item`.
-- @within zotxt
function zotxt_get_item (id)
assert(id ~= '', 'ID is the empty string ("").')
local ref, err, errtype = get(conv_json_to_meta, id)
if not ref then return nil, err, errtype end
ref.id = MetaInlines{Str(id)}
return ref
end
-- (a) The CSL JSON reader is only available in Pandoc v2.11 or later.
-- (b) However, pandoc-citeproc had a (useful) bug and parsed formatting
-- tags in metadata fields, so there is no need to treat metadata
-- fields and bibliography files differently before Pandoc v2.11.
-- See <https://github.com/jgm/pandoc/issues/6722> for details.
if not pandoc.types or PANDOC_VERSION < {2, 11} then
zotxt_get_item = zotxt_get_item_csl
end
end
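-- An illustrative, commented-out sketch of a lookup; the citation key is
-- made up:
--
-- local ok, item, err = pcall(zotxt_get_item_csl, 'doe2020Word')
-- if not ok then print(item)       -- e.g., zotxt is not running.
-- elseif not item then print(err)  -- Item not found.
-- else print(item.title)
-- end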
-- Bibliography files
-- ------------------
--- The preferred order of keys in YAML bibliography files.
--
-- [Appendix IV](https://docs.citationstyles.org/en/stable/specification.html#appendix-iv-variables)
-- of the CSL specification lists all field names.
--
-- @see csl_keys_sort
-- @within Bibliography files
CSL_KEY_ORDER = {
'id', -- Item ID.
'type', -- For example, 'paper', 'book'.
'author', -- Author(s).
'recipient', -- Recipient of the document.
'status', -- Publication status (e.g., 'forthcoming').
'issued', -- When the item was published.
'title', -- The title.
'title-short', -- A short version of the title.
'short-title', -- Ditto.
'original-title', -- Original title.
'translator', -- Translator(s).
'editor', -- Editor(s).
'container-title', -- Publication the item was published in.
'container-title-short', -- A short version of that title.
'collection-editor', -- E.g., series editor(s).
'collection-title', -- E.g., a series.
'collection-title-short', -- A short version of the title.
'edition', -- Container's edition.
'volume', -- Volume no.
'issue', -- Issue no.
'page-first', -- First page.
'page', -- Pages or page range *or* number of pages.
'publisher', -- Publisher.
'publisher-place', -- City/cities the item was published in.
'original-publisher', -- Original publisher.
'original-publisher-place', -- Place the item was originally published in.
'doi', -- The DOI.
'pmcid', -- PubMed Central reference number.
'pmid', -- PubMed reference number.
'url', -- The URL.
'accessed', -- When the URL was last accessed.
'isbn', -- The ISBN of the item.
'issn', -- The ISSN of the container.
'call-number', -- Call number (of a library).
'language', -- Language the item is in.
'abstract', -- The abstract.
}
--- A mapping of filename suffixes to codecs.
--
-- If a key is not found, it is looked up again in lowercase.
-- @within Bibliography files
BIBLIO_TYPES = {}
do
local mt = {}
function mt.__index (self, key)
if type(key) == 'string' then return rawget(self, key:lower()) end
end
setmetatable(BIBLIO_TYPES, mt)
end
--- Parse BibLaTeX.
-- @within Bibliography files
BIBLIO_TYPES.bib = {}
--- Parse the content of a BibLaTeX file.
--
-- @string str The content of a BibLaTeX file.
-- @treturn[1] {table,...} A list of CSL items
-- if you use Pandoc v2.11 or later.
-- @treturn[2] {{id=string},...} A list of item IDs
-- if you use an earlier version of Pandoc.
-- @within Bibliography files
function BIBLIO_TYPES.bib.decode (str)
local doc = pandoc.read(str, 'biblatex')
if not doc.meta.references then return {} end
return walk(doc.meta.references, {MetaInlines = markdownify})
end
if not pandoc.types or PANDOC_VERSION < {2, 11} then
function BIBLIO_TYPES.bib.decode (str)
local ret = {}
local n = 0
for id in str:gmatch '@%w+%s*{%s*([^%s,]+)' do
n = n + 1
ret[n] = {id=id}
end
return ret
end
end
--- Parse BibTeX.
-- @within Bibliography files
BIBLIO_TYPES.bibtex = {}
--- Parse the content of a BibTeX file
--
-- @string str The content of a BibTeX file.
-- @treturn[1] {table,...} A list of CSL items
-- if you use Pandoc v2.11 or later.
-- @treturn[2] {{id=string},...} A list of item IDs
-- if you use an earlier version of Pandoc.
-- @within Bibliography files
function BIBLIO_TYPES.bibtex.decode (str)
local doc = pandoc.read(str, 'bibtex')
if not doc.meta.references then return {} end
return walk(doc.meta.references, {MetaInlines = markdownify})
end
if not pandoc.types or PANDOC_VERSION < {2, 11} then
BIBLIO_TYPES.bibtex = BIBLIO_TYPES.bib
end
--- De-/Encode CSL items in JSON.
-- @within Bibliography files
BIBLIO_TYPES.json = json
--- De-/Encode CSL items in YAML.
-- @within Bibliography files
BIBLIO_TYPES.yaml = {}
--- Parse a CSL YAML string.
--
-- @string str A CSL YAML string.
-- @treturn tab A list of CSL items.
-- @within Bibliography files
function BIBLIO_TYPES.yaml.decode (str)
local ds = false
for ln in str:gmatch '(.-)\r?\n' do
if ln == '---' then
ds = true
break
end
end
if not ds then str = concat{'---', EOL, str, EOL, '...', EOL} end
local doc = pandoc.read(str, 'markdown')
if not doc.meta.references then return {} end
return walk(doc.meta.references, {MetaInlines = markdownify})
end
--- Serialise a list of CSL items to a YAML string.
--
-- @tab items A list of CSL items.
-- @treturn string A CSL YAML string.
-- @raise See `yamlify`.
-- @within Bibliography files
function BIBLIO_TYPES.yaml.encode (items)
table.sort(items, csl_items_sort)
return yamlify({references=items}, nil, csl_keys_sort)
end
--- Alternative suffix for YAML files.
-- @within Bibliography files
BIBLIO_TYPES.yml = BIBLIO_TYPES.yaml
do
local key_order = {}
for i = 1, #CSL_KEY_ORDER do key_order[CSL_KEY_ORDER[i]] = i end
--- Sorting function for CSL field names.
--
-- Sorts field in the order in which they are listed in `CSL_KEY_ORDER`.
-- Unlisted fields are placed after listed ones in lexical order.
--
-- @string a A CSL fieldname.
-- @string b Another CSL fieldname.
-- @treturn bool Whether `a` should come before `b`.
-- @within Bibliography files
function csl_keys_sort (a, b)
local i, j = key_order[a], key_order[b]
if i and j then return i < j end
if i then return true end
if j then return false end
return a < b
end
end
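-- An illustrative, commented-out sketch of `csl_keys_sort`; listed keys
-- precede unlisted ones:
--
-- local fields = {'note', 'type', 'id'}
-- table.sort(fields, csl_keys_sort) -- {'id', 'type', 'note'}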
--- Sorting function for CSL items.
--
-- @tab a A CSL item.
-- @tab b Another CSL item.
-- @treturn bool Whether `a` should come before `b`.
-- @within Bibliography files
function csl_items_sort (a, b)
return a.id < b.id
end
--- Pick the IDs of CSL items out of a list of CSL items.
--
-- @tab items A list of CSL items.
-- @treturn {[string]=true,...} A [set](https://www.lua.org/pil/11.5.html)
-- of item IDs.
-- @raise An error if an item has an ID that cannot be coerced to a string.
-- @within Bibliography files
function csl_items_get_ids (items)
local ids = {}
for i = 1, #items do
local id = items[i].id
local t = type(id)
if t == 'string' then ids[id] = true
elseif t == 'table' then ids[stringify(id)] = true
elseif t ~= 'nil' then error 'Cannot parse ID of item.'
end
end
return ids
end
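-- An illustrative, commented-out sketch of `csl_items_get_ids`; the items
-- are made up:
--
-- local ids = csl_items_get_ids{{id = 'doe2020'}, {id = 'roe2019'}}
-- -- ids == {doe2020 = true, roe2019 = true}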
--- Read a bibliography file.
--
-- The filename suffix determines what format the contents of the file are
-- parsed as. There must be a decoder for that suffix in `BIBLIO_TYPES`.
--
-- @string fname The filename.
-- @treturn[1] tab A list of CSL items.
-- @treturn[2] nil `nil` if an error occurred.
-- @treturn[2] string An error message.
-- @treturn[2] ?int An error number if the error is an I/O error.
-- @within Bibliography files
function biblio_read (fname)
assert(fname ~= '', 'Filename is the empty string')
local suffix = fname:match '%.(%w+)$'
if not suffix then return nil, fname .. ': No filename suffix.' end
local codec = BIBLIO_TYPES[suffix]
if not codec then return nil, fname .. ': Unsupported format.' end
local decode = codec.decode
if not decode then return nil, fname .. ': Cannot parse format.' end
local str, err, errno = file_read(fname)
if not str then return nil, err, errno end
local ok, items = pcall(decode, str)
if not ok then return nil, fname .. ': Parse error.' end
return items
end
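-- An illustrative, commented-out sketch of `biblio_read`; the filename is
-- made up:
--
-- local items, err, errno = biblio_read 'zotero-bibliography.json'
-- if not items then printf(err) end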
--- Write sources to a bibliography file.
--
-- The filename suffix determines what format the data is written as.
-- There must be an encoder for that suffix in `BIBLIO_TYPES`.
-- If the file exists, it is overwritten.
-- Tries to print a warning to STDERR if that happens.
-- Ends every file with `EOL`.
--
-- @string fname A filename.
-- @tab[opt] items A list of CSL items. If no items are given,
-- tests whether data can be written in the corresponding format.
-- @treturn[1] str The filename suffix.
-- @treturn[2] nil `nil` if an error occurred.
-- @treturn[2] string An error message.
-- @treturn[2] ?int An error number if the error is an I/O error.
-- @raise An error if the filename is the empty string.
-- @within Bibliography files
function biblio_write (fname, items)
-- luacheck: ignore ok
assert(fname ~= '', 'Filename is the empty string')
local suffix = fname:match '%.(%w+)$'
if not suffix then return nil, fname .. ': No filename suffix.' end
local codec = BIBLIO_TYPES[suffix]
if not codec then return nil, fname .. ': Unsupported format.' end
local encode = codec.encode
if not encode then return nil, fname .. ': Cannot write format.' end
if not items or #items == 0 then return suffix end
local ok, str = pcall(encode, items)
if not ok then return nil, fname .. ': Serialisation error.' end
local ok, err, errno = file_write(fname, str, EOL)
if not ok then return nil, err, errno end
return suffix
end
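-- An illustrative, commented-out sketch of `biblio_write`; the filename and
-- item are made up:
--
-- local fmt, err = biblio_write('zotero-bibliography.json', {{id = 'doe2020'}})
-- if not fmt then printf(err) end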
--- Add items from Zotero to a bibliography file.
--
-- If an item is already in the bibliography file, it won't be added again.
-- Prints a warning to STDERR if it overwrites an existing file.
-- Also prints an error to STDERR for every item that cannot be found.
--
-- @string fname The name of the bibliography file.
-- @tab ids The IDs of the items that should be added,
-- e.g., `{'name:2019word', 'name2019WordWordWord'}`.
-- @treturn[1] bool `true` if the file was updated or no update was required.
-- @treturn[2] nil `nil` if an error occurs.
-- @treturn[2] string An error message.
-- @treturn[2] ?int An error number if the error is a file I/O error.
-- @raise See `zotxt_get_item`.
-- @within Bibliography files
function biblio_update (fname, ids)
-- luacheck: ignore ok fmt err errno
if #ids == 0 then return true end
local fmt, err = biblio_write(fname)
if not fmt then return nil, err end
-- @todo Remove this once the test suite is complete,
-- the script has been dogfooded, and has been out in the open for a while.
if fmt == 'yaml' or fmt == 'yml' then
warnf 'YAML bibliography file support is EXPERIMENTAL!'
end
local items, err, errno = biblio_read(fname)
if not items then
if errno ~= 2 then return nil, err, errno end
items = {}
end
local item_ids = csl_items_get_ids(items)
local nitems = #items
local n = nitems
for i = 1, #ids do
local id = ids[i]
if not item_ids[id] then
local ok, ret, err = pcall(zotxt_get_item_csl, id)
if not ok then
return nil, ret
elseif ret then
if fmt == 'yaml' or fmt == 'yml' then
ret = rconv_html_to_md(ret)
end
n = n + 1
items[n] = lower_keys(ret)
else
printf(err)
end
end
end
if (n == nitems) then return true end
fmt, err, errno = biblio_write(fname, items)
if not fmt then return nil, err, errno end
return true
end
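-- An illustrative, commented-out sketch of `biblio_update`; the filename
-- and citation key are made up:
--
-- local ok, err = biblio_update('zotero-bibliography.yaml', {'doe:2020word'})
-- if not ok then printf(err) end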
-- PANDOC
-- ======
do
local ts = {}
for k, v in sorted_pairs(pandoc) do
if type(v) == 'table' and not ts[v] then
local t = {k}
local mt = getmetatable(v)
            local n = 1
while mt and n < 16 do
if not mt.name or mt.name == 'Type' then break end
n = n + 1
t[n] = mt.name
mt = getmetatable(mt)
end
if t[n] == 'AstElement' then ts[v] = t end
end
end
--- The type of a Pandoc AST element.
--
-- @tparam pandoc.AstElement elem A Pandoc AST element.
-- @treturn[1] string The type
-- (e.g., 'MetaMap', 'Plain').
-- @treturn[1] string The high-order kind
-- (i.e., 'Block', 'Inline', or 'MetaValue').
-- @treturn[1] string The literal 'AstElement'.
-- @treturn[2] nil `nil` if `elem` is not a Pandoc AST element.
-- @within Document parsing
function elem_type (elem)
if type(elem) ~= 'table' then return end
local mt = getmetatable(elem)
if not mt or not mt.__type then return end
return unpack(ts[mt.__type])
end
end
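-- An illustrative, commented-out sketch of `elem_type`; the exact strings
-- returned depend on the Pandoc version:
--
-- elem_type(pandoc.Str 'x')  -- e.g., 'Str', 'Inline', 'AstElement'
-- elem_type 'x'              -- nil, not an AST element.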
do
local function w_map (tab, ...)
for k, v in pairs(tab) do
tab[k] = walk(v, ...)
end
end
local function w_seq (tab, ...)
for i = 1, #tab do
tab[i] = walk(tab[i], ...)
end
end
local function w_list (elem, ...)
local content = elem.content
for i = 1, #content do
content[i] = w_seq(content[i], ...)
end
end
local walker_fs = {
Meta = w_map,
MetaBlocks = w_seq,
MetaList = w_seq,
MetaInlines = w_seq,
MetaMap = w_map,
BulletList = w_list,
OrderedList = w_list
}
function walker_fs.Doc (doc, ...)
doc.meta = walk(doc.meta, ...)
local blocks = doc.blocks
for i = 1, #blocks do
blocks[i] = walk(blocks[i], ...)
end
end
--- Walk the AST and apply functions to matching elements.
--
-- Differs from `pandoc.walk_block` and `pandoc.walk_inline` by accepting
-- AST elements of *any* type (i.e., including documents as a whole, the
-- metadata block, and metadata fields), by applying the filter to the
-- given element itself, by walking the AST bottom-up (which implies that
-- the filter is applied to every node, regardless of whether any of that
-- node's ancestors matches it), and by allowing the functions in the
-- filter to return arbitrary data (as opposed to either a Pandoc AST
-- element type or `nil`). Use with caution.
--
-- @tparam pandoc.AstElement elem A Pandoc AST element.
-- @tparam {string=func,...} filter A filter.
-- @return The element, with the filter applied.
-- @within Document parsing
-- @fixme Untested.
function walk (elem, filter, _rd)
if not _rd then _rd = 0
else _rd = _rd + 1
end
assert(_rd < 512, 'Too much recursion.')
local ts = table.pack(elem_type(elem))
if ts.n == 0 then return elem end
local walker_f = walker_fs[ts[1]]
if walker_f then walker_f(elem, filter, _rd)
elseif elem.content then w_seq(elem.content, filter, _rd)
end
for i = 1, ts.n do
local func = filter[ts[i]]
if func then
local new = func(elem:clone())
if new ~= nil then elem = new end
end
end
return elem
end
end
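-- An illustrative, commented-out sketch of `walk`; the filter and the
-- metadata value it is applied to are made up:
--
-- local upcase = {Str = function (s) return Str(s.text:upper()) end}
-- meta.title = walk(meta.title, upcase)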
--- Collect sources from the document's metadata block.
--
-- Reads the `references` metadata field and every bibliography file
-- referenced by the `bibliography` metadata field.
--
-- Prints errors to STDERR if it cannot parse a bibliography file.
--
-- @tab meta A metadata block.
-- @treturn pandoc.List A list of CSL items.
-- @within Document parsing
function meta_get_sources (meta)
local ret = List()
if not meta then return ret end
if meta.references then ret:extend(meta.references) end
if meta.bibliography then
local fnames
local bibliography = meta.bibliography
if bibliography.tag == 'MetaInlines' then
fnames = {stringify(bibliography)}
elseif bibliography.tag == 'MetaList' then
fnames = bibliography:map(stringify)
else
printf 'Cannot parse metadata field "bibliography".'
return ret
end
for i = 1, #fnames do
local fname, err = file_locate(fnames[i])
if fname then
-- luacheck: ignore err
local items, err = biblio_read(fname)
if items then ret:extend(items)
else printf(err)
end
else
printf(err)
end
end
end
return ret
end
--- Collect the citation keys used in a document.
--
-- Prints errors to STDERR if it cannot parse a bibliography file.
--
-- @tab doc A document.
-- @string[opt] flags If the flag 'u' is given, collects only citation keys
-- of sources that are neither defined in the `references` metadata field
-- nor in any bibliography file.
-- @treturn {string,...} A list of citation keys.
-- @raise An error if an item ID is of an illegal data type.
-- @within Document parsing
function doc_get_ckeys (doc, flags)
-- @fixme `flags` is untested.
local ids = {}
if flags == 'u' and doc.meta then
local items = meta_get_sources(doc.meta)
ids = csl_items_get_ids(items)
end
local n = 0
local ckeys = {}
local flt = {}
function flt.Cite (cite)
for i = 1, #cite.citations do
local id = cite.citations[i].id
if id and not ids[id] then
n = n + 1
ckeys[n] = id
end
end
end
for i = 1, #doc.blocks do
pandoc.walk_block(doc.blocks[i], flt)
end
return ckeys
end
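-- An illustrative, commented-out sketch; `doc` stands for the document that
-- Pandoc passes to the filter:
--
-- local undefined = doc_get_ckeys(doc, 'u')  -- only keys without sources.
-- local all = doc_get_ckeys(doc)             -- every citation key.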
-- MAIN
-- ====
--- Add sources to a bibliography file and the file to the document's metadata.
--
-- Updates the bibliography file as needed and adds it to the `bibliography`
-- metadata field. Interprets a relative filename as relative to the
-- directory of the first input file passed to **pandoc**, *not* as relative
-- to the current working directory (unless no input files are given).
--
-- @tab meta A metadata block, with the field
-- `zotero-bibliography` set to the filename of the bibliography file.
-- @tab ckeys The citation keys of the items that should be added,
-- e.g., `{'name:2019word', 'name2019WordWordWord'}`.
-- Citation keys are just item IDs.
-- @treturn[1] tab An updated metadata block, with the field
-- `bibliography` added if needed.
-- @treturn[2] nil `nil` if no sources were found,
-- `zotero-bibliography` is not set, or an error occurred.
-- @treturn[2] string An error message, if applicable.
-- @raise See `zotxt_get_item`.
-- @within Main
function add_biblio (meta, ckeys)
-- luacheck: ignore ok
if #ckeys == 0 then return end
if meta['zotero-bibliography'] == nil then return end
local ok, fname = pcall(stringify, meta['zotero-bibliography'])
if not ok or not fname then
return nil, 'zotero-bibliography: Not a filename.'
elseif fname == '' then
return nil, 'zotero-bibliography: Filename is the empty string ("").'
end
if not path_is_abs(fname) then fname = path_join(wd(), fname) end
local ok, err = biblio_update(fname, ckeys)
if not ok then return nil, err end
if not meta.bibliography then
meta.bibliography = fname
elseif meta.bibliography.tag == 'MetaInlines' then
meta.bibliography = List{fname, meta.bibliography}
elseif meta.bibliography.tag == 'MetaList'
then meta.bibliography = List{unpack(meta.bibliography), fname}
end
return meta
end
--- Add sources to the `references` metadata field.
--
-- Prints an error message to STDERR for every source that cannot be found.
--
-- @tab meta A metadata block.
-- @tab ckeys The citation keys of the items that should be added,
-- e.g., `{'name:2019word', 'name2019WordWordWord'}`.
-- Citation keys are just item IDs.
-- @treturn[1] table An updated metadata block,
-- with the field `references` added if needed.
-- @treturn[2] nil `nil` if no sources were found or an error occurred.
-- @treturn[2] string An error message, if applicable.
-- @raise See `zotxt_get_item`.
-- @within Main
function add_refs (meta, ckeys)
if #ckeys == 0 then return end
if not meta.references then meta.references = MetaList({}) end
local n = #meta.references
for i = 1, #ckeys do
local ok, ret, err = pcall(zotxt_get_item, ckeys[i])
if not ok then return nil, ret
elseif ret then n = n + 1
meta.references[n] = ret
else printf(err)
end
end
return meta
end
--- Collect sources and add bibliographic data to a document.
--
-- Prints messages to STDERR if errors occur.
--
-- See the manual for details.
--
-- @tparam table doc A document.
-- @treturn[1] table `doc`, but with bibliographic data added.
-- @treturn[2] nil `nil` if nothing was done or an error occurred.
-- @raise See `zotxt_get_item`.
-- @within Main
function main (doc)
local ckeys = doc_get_ckeys(doc, 'u')
if next(ckeys) == nil then return end
for i = 1, 2 do
local add_srcs
if i == 1 then add_srcs = add_biblio
elseif i == 2 then add_srcs = add_refs
end
local meta, err = add_srcs(doc.meta, ckeys)
if meta then
doc.meta = meta
return doc
elseif err then
printf(err)
end
end
end
-- BOILERPLATE
-- ===========
--
-- Returning the whole script, rather than only a list of mappings of
-- Pandoc data types to functions, makes unit testing possible.
M[1] = {Pandoc = main}
return M
| 31.561766 | 102 | 0.596097 |
c9b51d740ca506f46d6f6272eb571f10a2679f47 | 6,909 | rs | Rust | mqttbytes/src/v5/pubrel.rs | IniterWorker/rumqtt | eb1713241584bf602bf3efe02ecceff0ed1c15d7 | ["Apache-2.0"] | 342 | 2020-06-25T01:00:36.000Z | 2022-03-28T21:46:35.000Z | mqttbytes/src/v5/pubrel.rs | IniterWorker/rumqtt | eb1713241584bf602bf3efe02ecceff0ed1c15d7 | ["Apache-2.0"] | 209 | 2020-06-30T16:49:01.000Z | 2022-03-31T08:11:37.000Z | mqttbytes/src/v5/pubrel.rs | thin-edge/rumqtt | 73e8ea2d91d3136c02c8eec55c9aa26076fa6ab7 | ["Apache-2.0"] | 102 | 2020-07-01T06:33:21.000Z | 2022-03-21T07:28:15.000Z | use super::*;
use bytes::{Buf, BufMut, Bytes, BytesMut};
/// Return code in connack
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(u8)]
pub enum PubRelReason {
Success = 0,
PacketIdentifierNotFound = 146,
}
/// Acknowledgement to QoS1 publish
#[derive(Debug, Clone, PartialEq)]
pub struct PubRel {
pub pkid: u16,
pub reason: PubRelReason,
pub properties: Option<PubRelProperties>,
}
impl PubRel {
pub fn new(pkid: u16) -> PubRel {
PubRel {
pkid,
reason: PubRelReason::Success,
properties: None,
}
}
fn len(&self) -> usize {
let mut len = 2 + 1; // pkid + reason
// If there are no properties during success, sending reason code is optional
if self.reason == PubRelReason::Success && self.properties.is_none() {
return 2;
}
if let Some(properties) = &self.properties {
let properties_len = properties.len();
let properties_len_len = len_len(properties_len);
len += properties_len_len + properties_len;
}
len
}
pub fn read(fixed_header: FixedHeader, mut bytes: Bytes) -> Result<Self, Error> {
let variable_header_index = fixed_header.fixed_header_len;
bytes.advance(variable_header_index);
let pkid = read_u16(&mut bytes)?;
if fixed_header.remaining_len == 2 {
return Ok(PubRel {
pkid,
reason: PubRelReason::Success,
properties: None,
});
}
let ack_reason = read_u8(&mut bytes)?;
if fixed_header.remaining_len < 4 {
return Ok(PubRel {
pkid,
reason: reason(ack_reason)?,
properties: None,
});
}
let puback = PubRel {
pkid,
reason: reason(ack_reason)?,
properties: PubRelProperties::extract(&mut bytes)?,
};
Ok(puback)
}
pub fn write(&self, buffer: &mut BytesMut) -> Result<usize, Error> {
let len = self.len();
buffer.put_u8(0x62);
let count = write_remaining_length(buffer, len)?;
buffer.put_u16(self.pkid);
// If there are no properties during success, sending reason code is optional
if self.reason == PubRelReason::Success && self.properties.is_none() {
return Ok(4);
}
buffer.put_u8(self.reason as u8);
if let Some(properties) = &self.properties {
properties.write(buffer)?;
}
Ok(1 + count + len)
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct PubRelProperties {
pub reason_string: Option<String>,
pub user_properties: Vec<(String, String)>,
}
impl PubRelProperties {
pub fn len(&self) -> usize {
let mut len = 0;
if let Some(reason) = &self.reason_string {
len += 1 + 2 + reason.len();
}
for (key, value) in self.user_properties.iter() {
len += 1 + 2 + key.len() + 2 + value.len();
}
len
}
pub fn extract(mut bytes: &mut Bytes) -> Result<Option<PubRelProperties>, Error> {
let mut reason_string = None;
let mut user_properties = Vec::new();
let (properties_len_len, properties_len) = length(bytes.iter())?;
bytes.advance(properties_len_len);
if properties_len == 0 {
return Ok(None);
}
let mut cursor = 0;
// read until cursor reaches property length. properties_len = 0 will skip this loop
while cursor < properties_len {
let prop = read_u8(&mut bytes)?;
cursor += 1;
match property(prop)? {
PropertyType::ReasonString => {
let reason = read_mqtt_string(&mut bytes)?;
cursor += 2 + reason.len();
reason_string = Some(reason);
}
PropertyType::UserProperty => {
let key = read_mqtt_string(&mut bytes)?;
let value = read_mqtt_string(&mut bytes)?;
cursor += 2 + key.len() + 2 + value.len();
user_properties.push((key, value));
}
_ => return Err(Error::InvalidPropertyType(prop)),
}
}
Ok(Some(PubRelProperties {
reason_string,
user_properties,
}))
}
fn write(&self, buffer: &mut BytesMut) -> Result<(), Error> {
let len = self.len();
write_remaining_length(buffer, len)?;
if let Some(reason) = &self.reason_string {
buffer.put_u8(PropertyType::ReasonString as u8);
write_mqtt_string(buffer, reason);
}
for (key, value) in self.user_properties.iter() {
buffer.put_u8(PropertyType::UserProperty as u8);
write_mqtt_string(buffer, key);
write_mqtt_string(buffer, value);
}
Ok(())
}
}
/// Map a numeric value to a PubRel reason code
fn reason(num: u8) -> Result<PubRelReason, Error> {
let code = match num {
0 => PubRelReason::Success,
146 => PubRelReason::PacketIdentifierNotFound,
num => return Err(Error::InvalidConnectReturnCode(num)),
};
Ok(code)
}
#[cfg(test)]
mod test {
use super::*;
use alloc::vec;
use bytes::BytesMut;
use pretty_assertions::assert_eq;
fn sample() -> PubRel {
let properties = PubRelProperties {
reason_string: Some("test".to_owned()),
user_properties: vec![("test".to_owned(), "test".to_owned())],
};
PubRel {
pkid: 42,
reason: PubRelReason::PacketIdentifierNotFound,
properties: Some(properties),
}
}
fn sample_bytes() -> Vec<u8> {
vec![
0x62, // payload type
0x18, // remaining length
0x00, 0x2a, // packet id
0x92, // reason
0x14, // properties len
0x1f, 0x00, 0x04, 0x74, 0x65, 0x73, 0x74, // reason_string
0x26, 0x00, 0x04, 0x74, 0x65, 0x73, 0x74, 0x00, 0x04, 0x74, 0x65, 0x73,
0x74, // user properties
]
}
#[test]
fn pubrel_parsing_works() {
let mut stream = bytes::BytesMut::new();
let packetstream = &sample_bytes();
stream.extend_from_slice(&packetstream[..]);
let fixed_header = parse_fixed_header(stream.iter()).unwrap();
let pubrel_bytes = stream.split_to(fixed_header.frame_length()).freeze();
let pubrel = PubRel::read(fixed_header, pubrel_bytes).unwrap();
assert_eq!(pubrel, sample());
}
#[test]
fn pubrel_encoding_works() {
let pubrel = sample();
let mut buf = BytesMut::new();
pubrel.write(&mut buf).unwrap();
assert_eq!(&buf[..], sample_bytes());
}
}
| 29.029412 | 92 | 0.549139 |
e701c130bf4c62690d0caf8daf7d49991fcc2956 | 5,974 | php | PHP | storage/framework/views/ee51f4724e1043165f5af5cc8a2b86facf0859da.php | mdsaifulcse/mseab | 4e431534cbb1b9d31a024b1200a879b58c175e08 | ["MIT"] | null | null | null | storage/framework/views/ee51f4724e1043165f5af5cc8a2b86facf0859da.php | mdsaifulcse/mseab | 4e431534cbb1b9d31a024b1200a879b58c175e08 | ["MIT"] | null | null | null | storage/framework/views/ee51f4724e1043165f5af5cc8a2b86facf0859da.php | mdsaifulcse/mseab | 4e431534cbb1b9d31a024b1200a879b58c175e08 | ["MIT"] | null | null | null | <?php $__env->startSection('style'); ?>
<link rel="stylesheet" href="<?php echo e(asset('assets/backend/css/dropzone.css')); ?>">
<link rel="stylesheet" href="<?php echo e(asset('assets/backend/css/media-uploader.css')); ?>">
<?php $__env->stopSection(); ?>
<?php $__env->startSection('site-title'); ?>
<?php echo e(__('Edit Profile')); ?>
<?php $__env->stopSection(); ?>
<?php $__env->startSection('content'); ?>
<div class="main-content-inner margin-top-30">
<div class="row">
<div class="col-lg-12">
<?php echo $__env->make('backend.partials.message', \Illuminate\Support\Arr::except(get_defined_vars(), ['__data', '__path']))->render(); ?>
<div class="card">
<div class="card-body">
<?php if($errors->any()): ?>
<div class="alert alert-danger">
<ul>
<?php $__currentLoopData = $errors->all(); $__env->addLoop($__currentLoopData); foreach($__currentLoopData as $error): $__env->incrementLoopIndices(); $loop = $__env->getLastLoop(); ?>
<li><?php echo e($error); ?></li>
<?php endforeach; $__env->popLoop(); $loop = $__env->getLastLoop(); ?>
</ul>
</div>
<?php endif; ?>
<form action="<?php echo e(route('admin.profile.update')); ?>" method="post" enctype="multipart/form-data">
<?php echo csrf_field(); ?>
<div class="form-group">
<label for="username"><?php echo e(__('Username')); ?></label>
<input type="text" class="form-control" name="username" id="username" value="<?php echo e(auth()->user()->username); ?> ">
<small class="info-text"><?php echo e(__('do not use any space between words')); ?></small>
</div>
<div class="form-group">
<label for="name"><?php echo e(__('Name')); ?></label>
<input type="text" class="form-control" id="name" name="name"
value="<?php echo e(auth()->user()->name); ?>">
</div>
<div class="form-group">
<label for="email"><?php echo e(__('Email')); ?></label>
<input type="email" class="form-control" id="email" name="email"
value="<?php echo e(auth()->user()->email); ?> ">
</div>
<div class="form-group">
<?php $image_upload_btn_label = 'Upload Image'; ?>
<div class="media-upload-btn-wrapper">
<div class="img-wrap">
<?php
$profile_img = get_attachment_image_by_id(auth()->user()->image,null,true);
?>
<?php if(!empty($profile_img)): ?>
<div class="attachment-preview">
<div class="thumbnail">
<div class="centered">
<img class="avatar user-thumb" src="<?php echo e($profile_img['img_url']); ?>" alt="<?php echo e(auth()->user()->name); ?>">
</div>
</div>
</div>
<?php $image_upload_btn_label = 'Change Image'; ?>
<?php endif; ?>
</div>
<input type="hidden" name="image" value="<?php echo e(auth()->user()->image); ?>">
<button type="button" class="btn btn-info media_upload_form_btn" data-btntitle="Select Profile Picture" data-modaltitle="Upload Profile Picture" data-imgid="<?php echo e(auth()->user()->image); ?>" data-toggle="modal" data-target="#media_upload_modal">
<?php echo e(__($image_upload_btn_label)); ?>
</button>
</div>
<small class="info-text"><?php echo e(__('Recommended Image Size 100x100. Only Accept: jpg,png.jpeg. Size less than 2MB')); ?></small>
</div>
<div class="form-group">
<button type="submit" class="btn btn-primary"><?php echo e(__('Save changes')); ?></button>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
<?php echo $__env->make('backend.partials.media-upload.media-upload-markup', \Illuminate\Support\Arr::except(get_defined_vars(), ['__data', '__path']))->render(); ?>
<?php $__env->stopSection(); ?>
<?php $__env->startSection('script'); ?>
<script src="<?php echo e(asset('assets/backend/js/dropzone.js')); ?>"></script>
<?php echo $__env->make('backend.partials.media-upload.media-js', \Illuminate\Support\Arr::except(get_defined_vars(), ['__data', '__path']))->render(); ?>
<?php $__env->stopSection(); ?>
<?php echo $__env->make('backend.admin-master', \Illuminate\Support\Arr::except(get_defined_vars(), ['__data', '__path']))->render(); ?><?php /**PATH /home/aapbd/public_html/dohs/resources/views/auth/admin/edit-profile.blade.php ENDPATH**/ ?> | 69.465116 | 288 | 0.436224 |
385554700d440f9ebf8b9432730ed7a29bc11964 | 16,039 | php | PHP | out/pages/terms-conditions.php | ebangali/Elegant | ba055702d8efea59956db008500e3f10e40c45c2 | ["MIT"] | null | null | null | out/pages/terms-conditions.php | ebangali/Elegant | ba055702d8efea59956db008500e3f10e40c45c2 | ["MIT"] | null | null | null | out/pages/terms-conditions.php | ebangali/Elegant | ba055702d8efea59956db008500e3f10e40c45c2 | ["MIT"] | 1 | 2022-03-24T22:44:51.000Z | 2022-03-24T22:44:51.000Z | <?php include_once (dirname(dirname(dirname(__FILE__))).'/initialize.php'); ?>
<?php
include_once(ebbd.'/dbconfig.php');
$adMin = new ebapps\dbconnection\dbconfig();
if(isset($adMin->eBAdminUserIsSet))
{
?>
<?php include_once (eblayout.'/a-common-header-icon.php'); ?>
<?php include_once (eblayout.'/a-common-header-title-one.php'); ?>
<?php include_once (eblayout.'/a-common-header-meta-noindex.php'); ?>
<?php include_once (eblayout.'/a-common-header-meta-scripts-below-body-facebook.php'); ?>
<?php include_once (eblayout.'/a-common-header-meta-scripts.php'); ?>
<?php include_once (eblayout.'/a-common-page-id-start.php'); ?>
<?php include_once (eblayout.'/a-common-header.php'); ?>
<nav>
<div class='container'>
<div>
<?php include_once (eblayout.'/a-common-navebar.php'); ?>
<?php include_once (eblayout.'/a-common-navebar-index-blog.php'); ?>
</div>
</div>
</nav>
<?php include_once (eblayout.'/a-common-page-id-end.php'); ?>
<div class='container'>
<div class='row row-offcanvas row-offcanvas-right'>
<div class='col-xs-12 col-md-2'>
</div>
<div class='col-xs-12 col-md-7 sidebar-offcanvas'>
<div class='well'>
<h1>Terms and Conditions</h1>
</div>
<div class='well'>
<p>Welcome to <?php echo domain; ?>. The following terms and conditions govern your (and any person or entity you enable/allow access through your access) access and use of this website ('Site'), the contents contained herein ('Contents') and the services offered on the Site ('Services').</p>
<p>The term <?php echo domain; ?> refers to the owner of the website. The terms 'you' and 'your' refer to the user or viewer of the website.</p>
<p>The <b>use</b> of this website is subject to the following terms and conditions:</p>
<p>
<strong>1. Acceptance of Terms and Conditions</strong><br>
1.1 In order to browse, use and/or access the Site and/or the Services you are required to agree to, comply with and be bound by these terms and conditions. You will be deemed to have accepted these terms and conditions by browsing, viewing, accessing and/or using the Site and/or the Services offered on the Site.
</p>
<p>1.2 If you disagree with any part of these terms and conditions, please do not continue to browse, use and/or access the Site and/or the Services.</p>
<p>1.3 In addition to these, there may be additional terms and conditions which apply to individual Services which you will be deemed to have accepted if you register for such Service.</p>
<p>
<strong>2. Registration and use of the Services and/or Site</strong><br>
2.1 In order to access/use or continue to access/use the Site and/or certain Services, you may be required to provide certain personal information. You agree to provide true, accurate, updated, and complete information when registering at the Site or for such Services.
</p>
<p>2.2 You agree to access/use the Site and/or Services only for purposes that are expressly permitted by (i) these terms and conditions and (ii) any applicable law, regulation, guideline or instruction from <?php echo domain; ?>, the industry regulator or any other competent authority or any policy adopted by <?php echo domain; ?> with regard to the acceptable access/use of the Site and/or Services.</p>
<p>
<strong>3. Prohibited use of the Site, Services and/or Contents</strong><br>
3.1 You specifically agree not to access/use (or attempt to access/use) any part of the Site and/or Services through any form of automated means (including use of scripts or web crawlers) and shall ensure that you comply with, in addition to these terms and conditions, instructions set out in relation to the access/use of any part of the Site and/or Services.
</p>
<p>3.2 You agree that you will not engage in any form of activity that interferes with or disrupts any part of the Site and/or Services (or the servers and networks which are connected to the Site and/or Services).</p>
<p>3.3 You agree that you will not reproduce, duplicate, copy, sell, trade or resell any part of the Site and/or Services for any purpose.</p>
<p>3.4 You, and any person or entity you allow to access/use the Site and/or Services through your internet access/access to the Site and/or Services, are not allowed to:</p>
<p>(a) Copy, disclose, modify, reformat, display, distribute, sell, perform, publish, transfer, link to, reverse engineer or decompile or otherwise make available any part of the Site and/or Services except as set out in these terms and conditions.</p>
<p>(b) Post or send any material that is abusive, indecent, defamatory, obscene, pornographic, offensive or menacing, or that has the effect of causing the recipient to feel harassed, abused or offended.</p>
<p>(c) Post or send any material in breach of confidence, intellectual property rights, privacy or any right of a third party.</p>
<p>(d) Hack into, make excessive traffic demands, probe or port scan other computers, deliver viruses, mail bombs, chain letters or pyramid schemes or otherwise engage in any other behaviour intended to inhibit other users from accessing/using any part of the Site and/or Services or any other website or services.</p>
<p>(e) Collect and process others personal data except in accordance with applicable data protection law and these terms and conditions.</p>
<p>(f) Advertise or offer to sell goods or services on the pretext that the same are endorsed, offered for sale or originate from <?php echo domain; ?>;</p>
<p>(g) Infringe any other person's or entity’s intellectual property rights.</p>
<p>(h) Use the Site and/or Services to harvest or collect information about users of the Site and/or Services or to post or otherwise distribute unauthorized or unsolicited advertising, junk or bulk email (also known as 'spam').</p>
<p>(i) Use the Site and/or Services to send emails and other content couched, phrased or written in such a manner as to give the impression that the email is correspondence from <?php echo domain; ?>.</p>
<p>(j) Use any part of the Site and/or Services in any way that we, in our sole and absolute discretion, consider objectionable, inappropriate, likely to injure our business, brand, goodwill or reputation, or otherwise unacceptable.</p>
<p>3.5 You are responsible for any misuse of the Site and/or Services even if it is by another person or entity using your access to the Site and/or Services.</p>
<p>3.6 We reserve the right to block, remove, edit or refuse to post any material that you attempt to transmit through the Site and/or Services.</p>
<p>
<strong>4. The information we may collect</strong><br>
4.1 We may collect, inter alia, the following information from you:<br>
(a) Full Name;<br>
(b) Shop/Business/Institute Name;<br>
(c) Business/Institute Type;<br>
(d) Home/Shop/Business Address including District Thana (Police Station).<br>
(e) Contact Information including Mobile Number, E-mail Address.<br>
(f) Demographic Information such as GPS coordinates, Postcode, Preferences and Interests. and<br>
(g) Other Information relevant to Registration, Surveys, and/or Offers.
</p>
<p>4.2 If you do not wish to register, we may not collect any personal information from you and you may visit our site anonymously. However, you will be bound by these terms and conditions with regard to access/use of this Site and/or Services.</p>
<p>
<strong>5. Use of your Information</strong><br />
5.1 Information we may collect from you may be used for the following purposes:<br>
(a) To provide <?php echo domain; ?> Products and Services;<br>
(b) For internal record keeping.<br>
(c) To improve the Site (we continually strive to improve our website based on the information and feedback received from you).<br>
(d) To send periodic e-mails.<br>
(e) To administer a contest, promotion, survey or other Site feature.
</p>
<p>5.2 We implement a variety of security measures to keep the personal information we hold and store safe. However, neither we nor any third parties provide any warranty or guarantee as to the security and safety measures related to the storage of your personal information.</p>
<p>
<strong>6. Intellectual Property Rights</strong><br>
6.1 All copyright, trade marks, patents, brand names, corporate names and other intellectual property rights in any material or content (including without limitation software, data, applications, information, text, photographs, music, sound, videos, graphics, logos, symbols, artwork, designs, layout, look, appearance and other material or moving images) contained in or accessible via the Services ('Contents') is either owned by <?php echo domain; ?> or by the rights' owner(s) for use as part of the Services and/or on the Site. All trademarks reproduced herein, which are not the property of or licensed to <?php echo domain; ?>, are the property of their respective owners.
</p>
<p>6.2 You are only allowed to access/use the Site and/or Services as set out in these terms and conditions. If you wish to access/use the Contents or any part thereof for any other purpose other than reviewing it on the Site then you will be required to obtain the prior written permission of the owner of the rights in that material. All rights are expressly reserved.</p>
<p>
6.3 Having noted the above you shall not be entitled in respect of any Contents (wholly or partly):<br>
(a) To reproduce, duplicate, copy, sell, trade or resell any part of the Contents for any purpose.<br>
(b) To pass any part of the Contents on to third parties or to allow third parties to access/use any part thereof for any purpose in any way unless and to the extent expressly permitted.<br>
(c) To change, edit, modify, reformat or adapt any part of the Contents in anyway.
</p>
<p>6.4 Any unauthorized access/use of this Site, Services and/or Contents may give rise to a claim for damages, penalty, losses, legal suit and/or amount to a criminal offence. Further, you will also be liable for all costs and consequences arising out of any claim, demand, penalty, proceeding or legal suit (civil or criminal) initiated as a result of your unauthorized access/use of this Site, Services and/or Contents.</p>
<p><strong>7. No Warranties, No Guarantee</strong><br>
7.1 There is no warranty or guarantee for your data. There is also no exchange, return or refund for any damage to your device, your time or your money if you use <?php echo domain; ?> services.
</p>
<p>7.2 Your use of any information or materials or services on this website is entirely at your own risk, for which we shall not be liable. It shall be your own responsibility to ensure that any products, services or data available through this website meet your particular requirements.</p>
<p>7.3 Any information or materials provided herein is solely for your personal information and convenience, and is not intended for commercial use, nor for trading or investing purposes, and therefore should not be treated as a representation or offer of any kind. Such information or materials are not appropriate for the purposes of making a decision to carry out a transaction or trade, nor does it provide any form of advice (investment, tax, legal) amounting to investment advice, or make any recommendations regarding particular financial instruments, investments, products or services.</p>
<p>7.4 The information contained in this Site, Services and/or Contents may be out of date and/or may contain other errors or omissions. <?php echo domain; ?> provides the Services and Contents on the Site 'as is' and makes no warranties of any kind, either express or implied, with respect to the Services and/or the Contents (including without limitation regarding their satisfactory quality, freedom from viruses or other harmful components, fitness for a particular purpose, suitability, reliability, timeliness, accuracy, completeness, security or that they are free from error).</p>
<p>7.5 Neither <?php echo domain; ?> nor any third party provides any warranty or guarantee or assume any liability or responsibility as to the accuracy, timeliness, performance, completeness, suitability or otherwise of the information or materials (including but not limited to Contents, Services, activities or offers) found, offered or represented on this Site for any particular purpose. You acknowledge that such information, materials, Contents, Services, activities or offers may contain inaccuracies or errors and we expressly exclude liability for any such inaccuracies or errors to the fullest extent permitted by law.</p>
<p><strong>8. Third Party Hyperlinks and Websites</strong><br>
8.1 From time to time, this Site may also include links to other websites. These links are provided for your convenience and to provide further information only.</p>
<p>8.2 The inclusion of such links to third party websites not controlled by us does not imply any endorsement by us of such websites and as such any transaction you may make with any third party flowing from such links is carried out entirely at your own risk and we accept no liability for any damages or losses that you may suffer as a result.</p>
<p>8.3 These third party websites have separate and independent terms and conditions and privacy policies for their access/use. Neither we nor any third parties provide any warranty or guarantee or assume any liability or responsibility as to the accuracy, timeliness, performance, completeness, suitability or otherwise of the information or materials (including but not limited to contents, services, activities or offers) found, offered or represented on the linked website(s) for any particular purpose. You acknowledge that such information or materials may contain inaccuracies or errors and we expressly exclude liability for any such inaccuracies or errors to the fullest extent permitted by law. <?php echo domain; ?> explicitly disclaims any responsibility or liability as to any consequential losses or damages arising as a consequence of any of the above.</p>
<p><strong>9. Use of Cookies</strong><br>
9.1 This website uses sessions and does not use cookies. However, we take no responsibility for whether any third-party websites linked from our Site use cookies.</p>
<p><strong>10. Liability</strong><br>
10.1 Your use/access of any information or materials on this Site is entirely at your own risk. It shall be your own responsibility to ensure that any products, services, or information available through this Site meet your specific requirements. We shall not be liable or responsible in any regard whatsoever for your use/access of any information or materials on this Site.</p>
<p>10.2 <?php echo domain; ?> shall not be liable for any loss of use, access, profits or data or any direct, indirect, special or consequential damages or losses, whether such losses or damages arise in contract, negligence, tort or otherwise including without limitation to the foregoing any damages or losses in relation to:</p>
<p>(a) Your use of, reliance upon or inability to use the Site, Services and/or Contents.</p>
<p>(b) If you are dissatisfied or you do not agree with any part of the Site, Services, Contents and/or any of these terms and conditions, your sole and exclusive remedy is to discontinue your use/access of this Site, Services and/or Contents.</p>
<p><strong>11. General</strong><br>
11.1 The information and materials provided in relation to the Contents and Services on the pages of this Site are for your general information and use/access only and any part of such information and materials is subject to change without notice.</p>
<p>11.2 <?php echo domain; ?> reserves the right to review, revise, amend and/or change the Services, Contents, Site, its policies and/or these terms and conditions at any time without any prior notice and you will be deemed to have been bound by such revision, amendment or change by continuing to use/access the Site, Services and/or Contents.</p>
</div>
</div>
<div class='col-xs-12 col-md-3 sidebar-offcanvas'>
</div>
</div>
</div>
<?php include_once (eblayout.'/a-common-footer.php'); ?>
<?php
}
else
{
header("Location: ".outLink."/access/admin-register.php");
}
?> | 114.564286 | 871 | 0.771557 |
da65385464bd8f701299f82de25c802af1b95aef | 1,029 | php | PHP | app/Http/Controllers/UserReceiptsController.php | OmarFaruk20/Laravel_pos | 5a62a91f94f43cf20b4e98ec8e1e0d57303de5e3 | ["MIT"] | null | null | null | app/Http/Controllers/UserReceiptsController.php | OmarFaruk20/Laravel_pos | 5a62a91f94f43cf20b4e98ec8e1e0d57303de5e3 | ["MIT"] | null | null | null | app/Http/Controllers/UserReceiptsController.php | OmarFaruk20/Laravel_pos | 5a62a91f94f43cf20b4e98ec8e1e0d57303de5e3 | ["MIT"] | null | null | null | <?php
namespace App\Http\Controllers;
use App\Receipt;
use App\User;
use App\Http\Requests\ReceiptRequest;
use Illuminate\Support\Facades\Auth;
use Illuminate\Http\Request;
class UserReceiptsController extends Controller
{
public function __construct()
{
$this->data['tab_manu'] = 'receipts';
}
public function index($id)
{
$this->data['user'] = User::findOrFail($id);
return view('users.receipts.receipts', $this->data);
}
public function store(ReceiptRequest $request, $user_id){
$formdata = $request->all();
$formdata['user_id'] = $user_id;
$formdata['admin_id'] = Auth::id();
Receipt::create($formdata);
return redirect()->route('users.receipts', $user_id)->with('message', 'Receipts Added Successfully');
}
public function destroy($user_id, $receipts_id){
Receipt::destroy($receipts_id);
return redirect()->route('users.receipts', $user_id)->with('message', 'Receipts Deleted Successfully');
}
}
| 26.384615 | 111 | 0.648202 |
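The controller above redirects to the named route users.receipts and receives a user id (plus a receipt id for destroy), so it presumably sits behind route definitions along the lines of the minimal sketch below. Only the route name users.receipts and the three methods index/store/destroy come from the file itself; the URIs, the extra route names, the HTTP verbs for store and destroy, and the auth middleware are illustrative assumptions, written in Laravel 8-style array syntax.

<?php
// routes/web.php -- a minimal sketch of the routes this controller appears to assume.
// Only the route name 'users.receipts' and the methods index/store/destroy come from the
// controller above; the URIs, the store/destroy route names, the HTTP verbs and the 'auth'
// middleware are assumptions for illustration.

use App\Http\Controllers\UserReceiptsController;
use Illuminate\Support\Facades\Route;

Route::middleware('auth')->group(function () {
    // Receipts list for one user: UserReceiptsController@index($id)
    Route::get('/users/{user}/receipts', [UserReceiptsController::class, 'index'])
        ->name('users.receipts');

    // Create a receipt: UserReceiptsController@store(ReceiptRequest $request, $user_id)
    Route::post('/users/{user}/receipts', [UserReceiptsController::class, 'store'])
        ->name('users.receipts.store');

    // Delete a receipt: UserReceiptsController@destroy($user_id, $receipts_id)
    Route::delete('/users/{user}/receipts/{receipt}', [UserReceiptsController::class, 'destroy'])
        ->name('users.receipts.destroy');
});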
5887226b4cee12caa513dfaac0735b43c227c288 | 6,499 | css | CSS | public/css/app.css | BorisKa11/basket | a186aec53a65d8facfd966d4f6c074ce642509d2 | [
"MIT"
] | null | null | null | public/css/app.css | BorisKa11/basket | a186aec53a65d8facfd966d4f6c074ce642509d2 | [
"MIT"
] | null | null | null | public/css/app.css | BorisKa11/basket | a186aec53a65d8facfd966d4f6c074ce642509d2 | [
"MIT"
] | null | null | null | @import url(https://fonts.googleapis.com/css?family=Nunito);*{padding:0;margin:0;box-sizing:border-box}#app{min-height:calc(100vh - 100px);max-width:1024px;width:95%;margin:0 auto}.alert{margin:20px;background-color:#0c5a9b;color:#fff;display:flex;align-items:center;padding:15px 25px;border-radius:12px}.frm_logout{display:none}.navbar{display:flex;height:40px;flex-grow:1;justify-content:space-between;align-items:center;background-color:#0c5a9b;padding:0 20px}.navbar a{text-decoration:none;transition:all .3s;padding:0 25px;color:#fff;line-height:40px;display:block}.navbar a:hover{background-color:#fff;color:#0c5a9b}.navbar--link{font-weight:700}.navbar--links{display:flex}.navbar--links a{margin:0 15px}.navbar--links a:first-child{margin-left:0}.navbar--links a:nth-last-child(-n+1){margin-right:0}.footer{height:40px;display:flex;justify-content:center;align-items:center}.card,.footer{background-color:#0c5a9b;color:#fff}.card{position:relative;max-width:320px;margin:50px auto 0;border-radius:15px;padding:0 20px 20px}@media (max-width:768px){.card{margin-top:25px}}.card-header{font-size:1.2em;padding:10px 20px;margin:0 -20px 10px;background-color:#fff;color:#0c5a9b;border:2px solid #0c5a9b;border-bottom:none;border-radius:15px 15px 0 0}.card .form label{display:block;font-weight:bolder;font-size:.9em}.card .form input[type=email],.card .form input[type=password],.card .form input[type=text]{display:block;outline:none;font-size:1.8em;padding:6px 12px;border-radius:12px;max-width:100%;margin:5px 0 15px}.card .form .form-check{display:flex;justify-content:left;align-items:center;margin-bottom:20px}.card .form .form-check label{display:inline-block;font-weight:bolder;margin-left:10px}.card .form button{display:block;outline:none;font-size:1.5em;padding:6px 12px;border-radius:12px;max-width:100%;margin:5px 0 15px;transition:all .1s;cursor:pointer;color:#0c5a9b}.card .form button:hover{box-shadow:2px 2px 2px 2px rgba(0,0,0,.4),inset 2px 2px 2px rgba(0,0,0,.4)}.card .form a.btn-link{color:#fff}.pagination{margin:25px 0}.pagination ul{display:flex;list-style:none;justify-content:center;align-items:center}.pagination ul a{padding:6px 12px;background-color:#0c5a9b;color:#fff;border-radius:6px;margin:0 5px;font-size:1.3em;transition:all .3s;text-decoration:none;border:2px solid #0c5a9b}.pagination ul a:hover{color:#0c5a9b;background-color:#fff}.pagination ul li.active span,.pagination ul span.dots{padding:6px 12px;font-size:1.3em}.pagination ul li.active span{color:#0c5a9b;font-weight:bolder;border-radius:6px;margin:0 5px;border:2px solid #0c5a9b}#errorsShow{position:fixed;top:15%;right:50px;padding:10px 20px;border-radius:6px;display:none;font-size:smaller;width:300px;z-index:10000;color:#fff}#errorsShow.success{background-color:#1fad2b}#errorsShow.error{background-color:#e3342f}#errorsShow.info{background-color:#3580df}.page_title{font-weight:700;font-size:1.4em;margin:20px 30px}.items{display:flex;flex-wrap:wrap}.items .item{position:relative;flex:1 0 calc(25% - 30px);margin:0 15px 15px;border:1px solid #ddd;border-radius:10px;padding:15px 15px 50px}@media (max-width:972px){.items .item{flex:1 0 calc(33% - 30px)}}@media (max-width:768px){.items .item{flex:1 0 calc(50% - 30px)}}@media (max-width:640px){.items .item{flex:1 0 calc(100% - 30px)}}.items .item--title{margin:-15px -15px 10px;background-color:#0c5a9b;color:#fff;padding:15px}.items .item--description{font-size:.9em;color:#0c5a9b}.items 
.item--bottom{display:flex;position:absolute;width:100%;bottom:0;justify-content:space-between;border-top:1px solid #ddd;margin:10px -15px 0}.items .item--bottom div{display:flex;height:40px;justify-content:center;align-items:center;flex:1 0 50%}.items .item--bottom--price{border-right:1px solid #ddd;font-weight:bolder}.items .item--bottom--price .rub{margin-left:5px}.items .item--bottom--price .rub:before{content:"P"}.items .item--bottom--basket a{transition:all .3s;text-decoration:none;font-size:1.4em;font-weight:bolder;display:flex;justify-content:center;align-items:center;width:100%;height:100%}.items .item--bottom--basket a:before{content:"+"}.items .item--bottom--basket a:hover{background-color:#0c5a9b;color:#fff}.basket{position:relative;display:flex;flex-wrap:wrap;flex-grow:1;justify-content:space-between;min-height:40px;align-items:center;border:1px solid #ddd;margin-bottom:5px}.basket.removable{background-color:#e3342f;color:#fff}.basket .remove_confirm{transition:all .3s;filter:opacity(0);z-index:-1;justify-content:end;display:flex;width:100%;position:absolute;top:0;left:0;min-height:100%;background:linear-gradient(#f66d9b,#e3342f);color:#fff;align-items:center}.basket .remove_confirm a{flex:0 1 50px;display:flex;justify-content:center;border-radius:6px;margin:0 15px;padding:3px 0;transition:all .3s;text-decoration:none;background:#fff;color:#e3342f}.basket .remove_confirm a:hover{color:#fff}.basket .remove_confirm a.btn_yes:hover{background:#1fad2b}.basket .remove_confirm a.btn_no:hover{background:#e3342f}.basket.header_rows{font-weight:bolder;background-color:#0c5a9b;color:#fff}@media (max-width:978px){.basket.header_rows{display:none}}.basket.basket_row_color:nth-child(odd){background-color:#ddd}.basket--title{flex-grow:1;padding-left:10px}@media (max-width:978px){.basket--title{flex:1 0 100%;background-color:#0c5a9b;color:#fff;min-height:30px;align-items:center;display:flex}}.basket--bottom{display:flex;flex:0 1 350px;justify-content:space-between;align-items:center}@media (max-width:978px){.basket--bottom{flex:0 1 100%;margin:10px 0 5px;justify-content:space-between;padding:0 15px}}.basket--bottom--price{font-weight:bolder}.basket--bottom--price .rub{margin-left:5px}.basket--bottom--price .rub:before{content:"P"}.basket--bottom--buttons{display:flex;align-items:center}.basket--bottom--buttons .count_product{margin:0 15px;font-weight:bolder;font-size:1.2em}.basket--bottom--buttons a{width:50px;text-decoration:none;font-size:1.4em;display:flex;justify-content:center;align-items:center;margin-left:5px;margin-right:5px;border-radius:6px}.basket--bottom--buttons a.plus_basket{background-color:#38c172;color:#fff;margin-left:0}.basket--bottom--buttons a.plus_basket:before{content:"+"}.basket--bottom--buttons a.minus_basket{background-color:#f6993f;color:#fff}.basket--bottom--buttons a.minus_basket:before{content:"-"}.basket--bottom--buttons a.remove_basket{margin-right:0;background-color:#e3342f;color:#fff}.basket--bottom--buttons a.remove_basket:before{content:"+";transform:rotate(45deg)} | 6,499 | 6,499 | 0.788737 |
a9eb3fd5d8331cd18db9001994fb0603cb07337d | 7,001 | php | PHP | resources/views/page/lienhe.blade.php | alinh99/laravelBarberShopVer2 | 49f25d5f8dd643498d1b4f4edbe5ab049cf5dce0 | [
"MIT"
] | null | null | null | resources/views/page/lienhe.blade.php | alinh99/laravelBarberShopVer2 | 49f25d5f8dd643498d1b4f4edbe5ab049cf5dce0 | [
"MIT"
] | 3 | 2021-02-02T21:36:42.000Z | 2022-02-27T11:15:45.000Z | resources/views/page/lienhe.blade.php | alinh99/laravelBarberShopVer2 | 49f25d5f8dd643498d1b4f4edbe5ab049cf5dce0 | [
"MIT"
] | null | null | null | @extends('master')
@section('content')
@include('page\modal-booking')
<div class="container">
<!-- Start: Contatti+gmap -->
<div id="page" class="page">
<section
class="padding-110px-tb bg-white builder-bg xs-padding-60px-tb border-none"
id="contact-section17"
>
<div class="container">
<div class="row">
<!-- section title -->
<div class="col-md-12 col-sm-12 col-xs-12 text-center">
<h2
class="section-title-large sm-section-title-medium xs-section-title-large text-black font-weight-600 alt-font tz-text margin-ten-bottom xs-margin-fifteen-bottom"
>
Liên hệ
</h2>
</div>
<!-- end section title -->
</div>
<div class="row">
<!-- contact detail -->
<div
class="col-md-12 col-sm-12 col-xs-12 no-padding text-center center-col clear-both"
>
<div
class="col-md-4 col-sm-4 col-xs-12 xs-margin-thirteen-bottom"
>
<div
class="col-md-2 vertical-align-middle no-padding display-block md-margin-nine-bottom xs-margin-three-bottom"
>
<i
class="fa ti-location-pin icon-extra-large text-sky-blue xs-icon-medium-large tz-icon-color"
></i>
</div>
<div
class="col-md-10 vertical-align-middle text-left no-padding text-black md-display-block sm-text-center text-medium tz-text"
>
154 Phan Dang Luu, Hai Chau District <br /> Da Nang, Viet Nam.
</div>
</div>
<div
class="col-md-4 col-sm-4 col-xs-12 xs-margin-thirteen-bottom"
>
<div
class="col-md-3 vertical-align-middle no-padding display-block md-margin-nine-bottom xs-margin-three-bottom"
>
<i
class="fa ti-email icon-extra-large text-sky-blue xs-icon-medium-large tz-icon-color"
></i>
</div>
<div
class="col-md-9 vertical-align-middle text-left no-padding md-display-block sm-text-center"
>
<div
class="text-medium font-weight-600 text-black display-block tz-text"
>
General Enquiries
</div>
<a
class="tz-text text-black text-medium"
href="mailto:alinh1803@gmail.com"
>alinh1803@gmail.com</a
>
</div>
</div>
<div class="col-md-4 col-sm-4 col-xs-12">
<div
class="col-md-2 vertical-align-middle no-padding display-block md-margin-nine-bottom xs-margin-three-bottom"
>
<i
class="fa ti-mobile icon-extra-large text-sky-blue xs-icon-medium-large tz-icon-color"
></i>
</div>
<div
class="col-md-10 vertical-align-middle text-left no-padding md-display-block sm-text-center"
>
<div
class="text-medium font-weight-600 text-black display-block tz-text"
>
Call Us Today!
</div>
<div class="text-medium text-black tz-text">
+84 (9) 35 232 661
</div>
</div>
</div>
</div>
<!-- end contact detail -->
<!-- map -->
<div
class="col-md-12 col-sm-12 col-xs-12 map margin-ten-top margin-ten-bottom"
>
<iframe
class="width-100"
height="313"
id="map_canvas1"
src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d3151.843821917424!2d144.956054!3d-37.817127!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x6ad65d4c2b349649%3A0xb6899234e561db11!2sEnvato!5e0!3m2!1sen!2sin!4v1427947693651"
></iframe>
</div>
<!-- end map -->
<!-- contact form -->
@if(count($errors)>0)
<div class="alert alert-danger">
@foreach($errors->all() as $err)
                        {{$err}}<br>
@endforeach
</div>
@endif
@if(session()->has('success'))
<div class="alert alert-success">{{session()->get('success')}}</div>
@endif
<form action="{{route('lienhe')}}" method="post" class="float-left width-100">
<input type="hidden" name="_token" value="{{csrf_token()}}">
<div
class="col-md-12 col-sm-12 center-col contact-form-style2 no-padding"
>
<div class="col-md-6 col-sm-6">
<input
id="name"
type="text"
{{-- data-email="required" --}}
placeholder="*Tên của bạn"
class="medium-input"
name="name"
/>
<input
type="text"
{{-- data-email="required" --}}
placeholder="* Email của bạn"
class="medium-input"
id="email"
name="email"
/>
<input
type="number"
name="phone"
id="phone"
placeholder="Số điện thoại của bạn"
class="medium-input"
/>
</div>
<div class="col-md-6 col-sm-6">
<textarea
id="notes"
name="note"
placeholder="Nội dung..."
class="medium-input"
></textarea>
<button
class="contact-submit btn-medium btn bg-sky-blue text-black tz-text"
type="submit"
>
SEND MESSAGE
</button>
</div>
</div>
</form>
<!-- end contact form -->
</div>
</div>
</section>
</div>
<!-- javascript libraries -->
<!-- End: Contatti+gmap -->
</div>
@endsection
| 40.235632 | 254 | 0.41337 |
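The form in this view posts to route('lienhe') and the page renders $errors plus a 'success' flash message, so the receiving action presumably looks something like the sketch below. The class name, the validation rules and what happens to the enquiry are hypothetical; only the route name 'lienhe', the field names (name, email, phone, note) and the 'success' session key come from the template.

<?php
// A minimal sketch of the POST handler behind route('lienhe'), inferred from the view above.
// The class name, validation rules and the handling of the enquiry are assumptions; only the
// field names (name, email, phone, note), the route name and the 'success' flash key come
// from the Blade template.

namespace App\Http\Controllers;

use Illuminate\Http\Request;

class ContactController extends Controller
{
    public function store(Request $request)
    {
        // On failure validate() redirects back automatically, which fills $errors in the view.
        $data = $request->validate([
            'name'  => 'required|string|max:255',
            'email' => 'required|email',
            'phone' => 'nullable|numeric',
            'note'  => 'nullable|string',
        ]);

        // Persisting or mailing the enquiry is application-specific and omitted here.

        return redirect()->back()->with('success', 'Your message has been sent.');
    }
}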
b69a96b43e4ff514adad4f65f4932b40aab46c57 | 8,982 | swift | Swift | BetterShoppingList/Persistence/Persistence.swift | xaviaracil/better-shopping-list | a9115e7caf2bcf75a6125c766d8f85fd9230b51c | [
"MIT"
] | null | null | null | BetterShoppingList/Persistence/Persistence.swift | xaviaracil/better-shopping-list | a9115e7caf2bcf75a6125c766d8f85fd9230b51c | [
"MIT"
] | null | null | null | BetterShoppingList/Persistence/Persistence.swift | xaviaracil/better-shopping-list | a9115e7caf2bcf75a6125c766d8f85fd9230b51c | [
"MIT"
] | null | null | null | //
// Persistence.swift
// BetterShoppingList
//
// Created by Xavi Aracil on 31/3/22.
//
import CoreData
import CloudKit
import Combine
class PersistenceController {
// MARK: - NSPersistentHistoryToken variables
var lastToken: NSPersistentHistoryToken? = nil {
didSet {
guard let token = lastToken,
let data = try? NSKeyedArchiver.archivedData(
withRootObject: token,
requiringSecureCoding: true
) else { return }
do {
try data.write(to: tokenFile)
} catch {
let message = "Could not write token data"
print("###\(#function): \(message): \(error)")
}
}
}
lazy var tokenFile: URL = {
let url = NSPersistentContainer.defaultDirectoryURL().appendingPathComponent(
"BetterShoppingList",
isDirectory: true
)
if !FileManager.default.fileExists(atPath: url.path) {
do {
try FileManager.default.createDirectory(
at: url,
withIntermediateDirectories: true,
attributes: nil
)
} catch {
let message = "Could not create persistent container URL"
print("###\(#function): \(message): \(error)")
}
}
return url.appendingPathComponent("token.data", isDirectory: false)
}()
private var cancellableSet: Set<AnyCancellable> = []
// MARK: - static variables
static let shared = PersistenceController()
static var preview: PersistenceController = {
let result = PersistenceController(inMemory: true)
let viewContext = result.container.viewContext
let lock = PersistenceLock()
if !lock.exists {
do {
try viewContext.deleteAllObjects()
PersistenceTestData.load(in: viewContext)
try viewContext.save()
} catch {
let nsError = error as NSError
fatalError("Unresolved error \(nsError), \(nsError.userInfo)")
}
}
return result
}()
let container: NSPersistentCloudKitContainer
static let appGroup = "group.name.xaviaracil.BetterShoppingList.shared"
static let publicName = "Model-public"
// MARK: - Constructor
// swiftlint:disable function_body_length
init(inMemory: Bool = false) {
container = NSPersistentCloudKitContainer(name: "Model")
if inMemory {
container.persistentStoreDescriptions.first!.url = URL(fileURLWithPath: "/dev/null")
} else {
guard let description = container.persistentStoreDescriptions.first else {
fatalError("😱 \(#function): Failed to retrieve a persistent store description.")
}
// define private database
description.url = URL.storeURL(for: PersistenceController.appGroup, databaseName: "Model-private")
description.setOption(true as NSNumber, forKey: NSPersistentHistoryTrackingKey)
description.setOption(true as NSNumber, forKey: NSPersistentStoreRemoteChangeNotificationPostOptionKey)
description.configuration = "Local"
let containerIdentifier = description.cloudKitContainerOptions!.containerIdentifier
let privateOptions = NSPersistentCloudKitContainerOptions(containerIdentifier: containerIdentifier)
privateOptions.databaseScope = .private
description.cloudKitContainerOptions = privateOptions
// define public datababase
// swiftlint:disable:next line_length
let publicStoreUrl = URL.storeURL(for: PersistenceController.appGroup, databaseName: PersistenceController.publicName)
let publicDescription = NSPersistentStoreDescription(url: publicStoreUrl)
publicDescription.configuration = "Public"
publicDescription.setOption(true as NSNumber, forKey: NSPersistentHistoryTrackingKey)
// swiftlint:disable:next line_length
publicDescription.setOption(true as NSNumber, forKey: NSPersistentStoreRemoteChangeNotificationPostOptionKey)
let publicOptions = NSPersistentCloudKitContainerOptions(containerIdentifier: containerIdentifier)
publicOptions.databaseScope = .public
publicDescription.cloudKitContainerOptions = publicOptions
container.persistentStoreDescriptions.append(publicDescription)
}
container.loadPersistentStores(completionHandler: { (_, error) in
guard let error = error as NSError? else { return }
fatalError("😱 \(#function): Failed to load persistent stores: \(error)")
})
container.viewContext.automaticallyMergesChangesFromParent = true
if !inMemory {
do {
try container.viewContext.setQueryGenerationFrom(.current)
} catch {
print("Error in setQueryGenerationFrom: \(error)")
}
}
// Only initialize the schema when building the app with the
// Debug build configuration.
#if DEBUG
if !inMemory {
do {
// Use the container to initialize the development schema.
try container.initializeCloudKitSchema(options: [.dryRun])
} catch {
// Handle any errors.
print("Error initializing CloudKit: \(error)")
}
}
#endif
loadHistoryToken()
initNotifications(inMemory: inMemory)
}
func initNotifications(inMemory: Bool) {
if !inMemory {
NotificationCenter.default
.publisher(for: .NSPersistentStoreRemoteChange)
.sink {
self.processRemoteStoreChange($0)
}
.store(in: &cancellableSet)
} else {
NotificationCenter.default
.publisher(for: NSPersistentCloudKitContainer.eventChangedNotification)
.receive(on: RunLoop.main)
.sink {
self.processContainerChanged($0)
}
.store(in: &cancellableSet)
}
}
private var historyRequestQueue = DispatchQueue(label: "history")
private func loadHistoryToken() {
do {
let tokenData = try Data(contentsOf: tokenFile)
lastToken = try NSKeyedUnarchiver
.unarchivedObject(ofClass: NSPersistentHistoryToken.self, from: tokenData)
} catch {
// log any errors
}
}
func processRemoteStoreChange(_ notification: Notification) {
historyRequestQueue.async {
let backgroundContext = self.container.newBackgroundContext()
backgroundContext.performAndWait {
let request = NSPersistentHistoryChangeRequest
.fetchHistory(after: self.lastToken)
do {
let result = try backgroundContext.execute(request) as?
NSPersistentHistoryResult
guard
let transactions = result?.result as? [NSPersistentHistoryTransaction],
!transactions.isEmpty
else {
return
}
if let newToken = transactions.last?.token {
self.lastToken = newToken
}
self.mergeChanges(from: transactions)
} catch {
// log any errors
}
}
}
}
private func mergeChanges(from transactions: [NSPersistentHistoryTransaction]) {
let context = container.viewContext
context.perform {
transactions.forEach { transaction in
guard let userInfo = transaction.objectIDNotification().userInfo else {
return
}
NSManagedObjectContext
.mergeChanges(fromRemoteContextSave: userInfo, into: [context])
}
}
}
private func processContainerChanged(_ notification: Notification) {
// swiftlint:disable line_length
guard let event = notification.userInfo?[NSPersistentCloudKitContainer.eventNotificationUserInfoKey] as? NSPersistentCloudKitContainer.Event,
event.type == .setup else {
print("wrong type of notification")
return
}
if !event.succeeded,
let error = event.error {
let nsError = error as NSError
if nsError.code == 134400 {
// error initializing database: Unable to initialize without an iCloud account (CKAccountStatusNoAccount)
// since we are here only in inMemory initializations, load test data
PersistenceTestData.load(in: container.viewContext)
}
}
}
}
| 36.661224 | 149 | 0.60098 |
d87b4d253b758156a379f72dbe15839b6a140ddc | 59,306 | sql | SQL | create_table_from_word/base_table.sql | yongli82/CodeGenerator | 4ca9255c3c4c5392e45815fd20f605ccbbfd2325 | [
"MIT"
] | null | null | null | create_table_from_word/base_table.sql | yongli82/CodeGenerator | 4ca9255c3c4c5392e45815fd20f605ccbbfd2325 | [
"MIT"
] | null | null | null | create_table_from_word/base_table.sql | yongli82/CodeGenerator | 4ca9255c3c4c5392e45815fd20f605ccbbfd2325 | [
"MIT"
] | null | null | null | DROP TABLE IF EXISTS `FC_AccountingDimensionConfig`;
CREATE TABLE `FC_AccountingDimensionConfig` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`Dimension` varchar(64) COMMENT '结账纬度 1 客户Id 2 方案ID 3 shopID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '结账纬度配置' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_AccountingAccessToken`;
CREATE TABLE `FC_AccountingAccessToken` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`AccessToken` varchar(64) COMMENT '授权码',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '授权码表' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginAccountPayable`;
CREATE TABLE `FC_OriginAccountPayable` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号 , BusinessSource + BusinessType + OutBizId 保证唯一',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称或编码',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '应付日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '应付ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始应付明细' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginAPExtendInfo`;
CREATE TABLE `FC_OriginAPExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始应付明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始应付明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_AccountPayable`;
CREATE TABLE `FC_AccountPayable` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '应付日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度应付ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '应付' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyAccountPayable`;
CREATE TABLE `FC_MonthlyAccountPayable` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '应付月份',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度应付' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyAPSummary`;
CREATE TABLE `FC_MonthlyAPSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '应付月份yyyyMM',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度应付汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginAccountReceivable`;
CREATE TABLE `FC_OriginAccountReceivable` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '应收日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '应收ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始应收明细' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginARExtendInfo`;
CREATE TABLE `FC_OriginARExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始应收明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始应收明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_AccountReceivable`;
CREATE TABLE `FC_AccountReceivable` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '应收日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度应收ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '应收' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyAccountReceivable`;
CREATE TABLE `FC_MonthlyAccountReceivable` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '应收月份',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度汇总ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度应收' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyARSummary`;
CREATE TABLE `FC_MonthlyARSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '应收月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度应收汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_PaymentType`;
CREATE TABLE `FC_PaymentType` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`Name` varchar(64) COMMENT '付款类型名称 1 预付款付款(银行) 2 应付款付款 3 点付宝提现 4 保证金付款',
`Description` varchar(255) COMMENT '付款类型描述',
`Code` varchar(16) COMMENT '付款类型编号',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '付款类型' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginPayment`;
CREATE TABLE `FC_OriginPayment` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`PaymentTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '付款类型',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`PayChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '付款渠道 1 银行 2 支付宝 3 微信',
`PayEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '付款实体ID',
`PaymentDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '付款日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '付款ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始付款明细' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginPaymentExtendInfo`;
CREATE TABLE `FC_OriginPaymentExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始付款明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始付款明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_Payment`;
CREATE TABLE `FC_Payment` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`PaymentTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '付款类型',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`PayChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '付款渠道 1 银行 2 支付宝 3 微信',
`PayEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '付款实体ID',
`PaymentDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '付款日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度付款ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '付款' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyPayment`;
CREATE TABLE `FC_MonthlyPayment` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`PaymentTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '付款类型',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`PayChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '付款渠道 1 银行 2 支付宝 3 微信',
`PayEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '付款实体ID',
`PaymentMonth` int(11) NOT NULL DEFAULT 0 COMMENT '付款月份',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度汇总ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度付款' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyPaymentSummary`;
CREATE TABLE `FC_MonthlyPaymentSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`PaymentTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '付款类型',
`PaymentMonth` int(11) NOT NULL DEFAULT 0 COMMENT '付款月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`PayChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '付款渠道 1 银行 2 支付宝 3 微信',
`PayEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '付款实体ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度付款汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_CollectionType`;
CREATE TABLE `FC_CollectionType` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`Name` varchar(64) COMMENT '收款类型名称1 押金 2 赔偿 3 上架费 4 充值',
`Description` varchar(255) COMMENT '付款类型描述',
`Code` varchar(16) COMMENT '付款类型编号',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '收款类型' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginCollection`;
CREATE TABLE `FC_OriginCollection` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CollectionTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '收款类型',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`ReceiveChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '收款渠道 1 银行 2 支付宝 3 微信',
`ReceiveEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '收款实体ID',
`CollectionDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '收款日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '收款ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始收款明细' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginCollectionExtendInfo`;
CREATE TABLE `FC_OriginCollectionExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始收款明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始收款明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_Collection`;
CREATE TABLE `FC_Collection` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CollectionTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '收款类型',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`ReceiveChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '收款渠道 1 银行 2 支付宝 3 微信',
`ReceiveEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '收款实体ID',
`CollectionDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '收款日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度收款Id',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '收款' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyCollection`;
CREATE TABLE `FC_MonthlyCollection` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CollectionTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '收款类型',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户Id',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`ReceiveChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '收款渠道 1 银行 2 支付宝 3 微信',
`ReceiveEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '收款实体ID',
`CollectionMonth` varchar(16) COMMENT '收款月份',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度收款' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyCollectionSummary`;
CREATE TABLE `FC_MonthlyCollectionSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CollectionTypeId` int(11) NOT NULL DEFAULT 0 COMMENT '收款类型',
`CollectionMonth` varchar(16) COMMENT '收款月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`ReceiveChannel` tinyint(4) NOT NULL DEFAULT 0 COMMENT '收款渠道 1 银行 2 支付宝 3 微信',
`ReceiveEntityId` int(11) NOT NULL DEFAULT 0 COMMENT '收款实体ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度收款汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginPrePayment`;
CREATE TABLE `FC_OriginPrePayment` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '预付日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '预付ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始预付明细' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginPrePaymentExtendInfo`;
CREATE TABLE `FC_OriginPrePaymentExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始预付明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始预付明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_PrePayment`;
CREATE TABLE `FC_PrePayment` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '预付日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度预付ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '预付' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyPrePayment`;
CREATE TABLE `FC_MonthlyPrePayment` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '预付月份',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度预付' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyPrePaymentSummary`;
CREATE TABLE `FC_MonthlyPrePaymentSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '预付月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度预付汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginPreCollection`;
CREATE TABLE `FC_OriginPreCollection` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '预收日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '预收ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始预收明细' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginPreCollectionExtendInfo`;
CREATE TABLE `FC_OriginPreCollectionExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始预收明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始预收明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_PreCollection`;
CREATE TABLE `FC_PreCollection` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户Id',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`CollectionDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '收款日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度预收ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '预收' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyPreCollection`;
CREATE TABLE `FC_MonthlyPreCollection` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '预收月份',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度预收' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyPreCollectionSummary`;
CREATE TABLE `FC_MonthlyPreCollectionSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '预收月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度预收汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginBadDebtExtendInfo`;
CREATE TABLE `FC_OriginBadDebtExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始预收明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始坏账明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginBadDebt`;
CREATE TABLE `FC_OriginBadDebt` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`BadDebtDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '坏账确认日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度坏账ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始坏账明细' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyBadDebt`;
CREATE TABLE `FC_MonthlyBadDebt` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '坏账确认月份',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度坏账' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyBadDebtSummary`;
CREATE TABLE `FC_MonthlyBadDebtSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '坏账确认月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度坏账汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginInvoiceExtendInfo`;
CREATE TABLE `FC_OriginInvoiceExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始预收明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始发票明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginInvoice`;
CREATE TABLE `FC_OriginInvoice` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`InvoiceType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '发票性质 1 技术服务费 2 广告费 3 广告发布费',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '开票公司Id',
`InvoiceHeader` varchar(255) COMMENT '发票抬头',
`Amount` decimal(10,2) COMMENT '发票金额',
`InvoiceDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '发票日期',
`TaxNumber` varchar(32) COMMENT '税号',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '开票ID',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始开票信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_Invoice`;
CREATE TABLE `FC_Invoice` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`InvoiceType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '发票性质 1 技术服务费 2 广告费 3 广告发布费',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '开票公司Id',
`Amount` decimal(10,2) COMMENT '发票金额',
`InvoiceDate` datetime COMMENT '发票日期',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '月度Id',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '开票' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyInvoice`;
CREATE TABLE `FC_MonthlyInvoice` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`OutBizId` varchar(32) COMMENT '外部业务号',
`InvoiceType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '发票性质 1 技术服务费 2 广告费 3 广告发布费',
`CustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '客户ID',
`CustomerName` varchar(255) COMMENT '客户名称',
`SchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '方案ID',
`SchemaName` varchar(255) COMMENT '方案名称',
`ShopId` int(11) NOT NULL DEFAULT 0 COMMENT '分店ID',
`ShopName` varchar(255) COMMENT '分店名称',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '开票公司Id',
`Amount` decimal(10,2) COMMENT '发票金额',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '发票月份',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
UNIQUE KEY UX_OutBiz(`OutBizId`,`BusinessSource`,`BusinessType`),
KEY IX_CustomerId(`CustomerId`),
KEY IX_SchemaId(`SchemaId`),
KEY IX_ShopId(`ShopId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度开票' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyInvoiceSummary`;
CREATE TABLE `FC_MonthlyInvoiceSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度开票汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginVerificationExtendInfo`;
CREATE TABLE `FC_OriginVerificationExtendInfo` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`DetailId` int(11) NOT NULL DEFAULT 0 COMMENT '原始预收明细ID',
`VarName` varchar(64) COMMENT '属性名称',
`VarValue` varchar(255) COMMENT '属性值',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DetailId(`DetailId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '原始明细扩展信息' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_OriginVerification`;
CREATE TABLE `FC_OriginVerification` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`OutBusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '转出业务类型 1 团购 2 闪惠 3 闪付 4 电影 5 推广',
`InBusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '转入业务类型 1 团购 2 闪惠 3 闪付 4 电影 5 推广',
`OutBizId` varchar(32) COMMENT '外部业务号',
`OutAccountingSubject` int(11) NOT NULL DEFAULT 0 COMMENT '转出账务科目',
`InAccountingSubject` int(11) NOT NULL DEFAULT 0 COMMENT '转入账务科目',
`OutCustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '转出客户ID',
`OutCustomerName` varchar(255) COMMENT '转出客户名称',
`OutSchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '转出方案ID',
`OutSchemaName` varchar(255) COMMENT '转出方案名称',
`OutShopId` int(11) NOT NULL DEFAULT 0 COMMENT '转出分店ID',
`OutShopName` varchar(255) COMMENT '转出分店名称',
`InCustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '转入客户ID',
`InCustomerName` varchar(255) COMMENT '转入客户名称',
`InSchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '转入方案ID',
`InSchemaName` varchar(255) COMMENT '转入方案名称',
`InShopId` int(11) NOT NULL DEFAULT 0 COMMENT '转入分店ID',
`InShopName` varchar(255) COMMENT '转入分店名称',
`VerificationDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '核销日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '核销' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_Verification`;
CREATE TABLE `FC_Verification` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`OutBusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '转出业务类型 1 团购 2 闪惠 3 闪付 4 电影 5 推广',
`InBusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '转入业务类型 1 团购 2 闪惠 3 闪付 4 电影 5 推广',
`OutBizId` varchar(32) COMMENT '外部业务号',
`OutAccountingSubject` int(11) NOT NULL DEFAULT 0 COMMENT '转出账务科目',
`InAccountingSubject` int(11) NOT NULL DEFAULT 0 COMMENT '转入账务科目',
`OutCustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '转出客户ID',
`OutCustomerName` varchar(255) COMMENT '转出客户名称',
`OutSchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '转出方案ID',
`OutSchemaName` varchar(255) COMMENT '转出方案名称',
`OutShopId` int(11) NOT NULL DEFAULT 0 COMMENT '转出分店ID',
`OutShopName` varchar(255) COMMENT '转出分店名称',
`InCustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '转入客户ID',
`InCustomerName` varchar(255) COMMENT '转入客户名称',
`InSchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '转入方案ID',
`InSchemaName` varchar(255) COMMENT '转入方案名称',
`InShopId` int(11) NOT NULL DEFAULT 0 COMMENT '转入分店ID',
`InShopName` varchar(255) COMMENT '转入分店名称',
`VerificationDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '核销日期',
`Amount` decimal(10,2) COMMENT '金额',
`MonthlyId` int(11) NOT NULL DEFAULT 0 COMMENT '',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_MonthlyId(`MonthlyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '核销' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyVerification`;
CREATE TABLE `FC_MonthlyVerification` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`OutBusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '转出业务类型 1 团购 2 闪惠 3 闪付 4 电影 5 推广',
`InBusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '转入业务类型 1 团购 2 闪惠 3 闪付 4 电影 5 推广',
`OutBizId` varchar(32) COMMENT '外部业务号',
`OutAccountingSubject` int(11) NOT NULL DEFAULT 0 COMMENT '转出账务科目',
`InAccountingSubject` int(11) NOT NULL DEFAULT 0 COMMENT '转入账务科目',
`OutCustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '转出客户ID',
`OutCustomerName` varchar(255) COMMENT '转出客户名称',
`OutSchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '转出方案ID',
`OutSchemaName` varchar(255) COMMENT '转出方案名称',
`OutShopId` int(11) NOT NULL DEFAULT 0 COMMENT '转出分店ID',
`OutShopName` varchar(255) COMMENT '转出分店名称',
`InCustomerId` int(11) NOT NULL DEFAULT 0 COMMENT '转入客户ID',
`InCustomerName` varchar(255) COMMENT '转入客户名称',
`InSchemaId` int(11) NOT NULL DEFAULT 0 COMMENT '转入方案ID',
`InSchemaName` varchar(255) COMMENT '转入方案名称',
`InShopId` int(11) NOT NULL DEFAULT 0 COMMENT '转入分店ID',
`InShopName` varchar(255) COMMENT '转入分店名称',
`VerificationDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '核销日期',
`Amount` decimal(10,2) COMMENT '金额',
`DailyId` int(11) NOT NULL DEFAULT 0 COMMENT '',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_DailyId(`DailyId`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度核销' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_MonthlyVerificationSummary`;
CREATE TABLE `FC_MonthlyVerificationSummary` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BusinessType` tinyint(4) NOT NULL DEFAULT 0 COMMENT '业务类型: 1团购, 2预约预订, 3结婚亲子, 4储值卡, 5广告, 6闪惠, 7费用, 8闪付, 9电影, 10点菜, 11KTV预订, 12点付宝',
`AccountingMonth` int(11) NOT NULL DEFAULT 0 COMMENT '月份',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`Amount` decimal(10,2) COMMENT '金额',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '月度核销汇总' ENGINE=InnoDB DEFAULT CHARSET=utf8;
DROP TABLE IF EXISTS `FC_JeHistory`;
CREATE TABLE `FC_JeHistory` (
`Id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`BatchNum` varchar(240) COMMENT '导入的批标识',
`GroupId` int(11) NOT NULL DEFAULT 0 COMMENT '凭证分组ID',
`BusinessSource` tinyint(4) NOT NULL DEFAULT 0 COMMENT '数据来源: 1天玑 2账务 3结算 4推广 5支付中心',
`BusinessCatagory` varchar(125) COMMENT '业务系统分类(大类)',
`BusinessDetailedCategory` varchar(125) COMMENT '业务系统明细分类(小类)',
`CompanyId` int(11) NOT NULL DEFAULT 0 COMMENT '公司ID',
`BankId` int(11) NOT NULL DEFAULT 0 COMMENT '银行ID?(支付宝?)',
`Algorithm` varchar(125) COMMENT '准则 PRC/US',
`GLDate` date COMMENT 'GL日期',
`PeriodName` varchar(100) COMMENT '期间(2010-01)',
`Currency` varchar(10) COMMENT 'CNY/USD/HKD',
`CurrencyRateType` varchar(20) COMMENT 'User/Corporate',
`CurrencyRateDate` date COMMENT '汇率时间',
`CurrencyRate` decimal(16,6) COMMENT '原币金额',
`Amount` decimal(16,6) COMMENT '本位币金额',
`CreationDate` datetime COMMENT '数据创建日期,精确到秒',
`CreatedBy` varchar(100) COMMENT '数据创建者',
`RequestId` int(11) NOT NULL DEFAULT 0 COMMENT '',
`CityId` varchar(64) COMMENT '城市ID',
`DepartmentId` varchar(64) COMMENT '部门ID',
`DepartmentName` varchar(255) COMMENT '部门名称',
`GLSubAccount` varchar(255) COMMENT '子目类',
`EmpType` varchar(32) COMMENT '员工类型',
`AddTime` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' COMMENT '创建时间',
`UpdateTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
`RecordStatus` tinyint(4) NOT NULL DEFAULT 1 COMMENT '记录状态: 1 有效, 2 无效',
PRIMARY KEY (`Id`),
KEY IX_UpdateTime(`UpdateTime`),
KEY IX_AddTime(`AddTime`)
) COMMENT '财务凭证中间表' ENGINE=InnoDB DEFAULT CHARSET=utf8;
| 53.284816 | 136 | 0.70057
6d26afd0844323fd8b842dcb7bc984f02c473230 | 208 | ts | TypeScript | jhipster/jhipster-monolithic/src/main/webapp/app/entities/comment/comment.model.ts | menty44/tutorials | 96c1d20a2136786d4911ed7dd8b74d34c3800362 | ["MIT"] | null | null | null | jhipster/jhipster-monolithic/src/main/webapp/app/entities/comment/comment.model.ts | menty44/tutorials | 96c1d20a2136786d4911ed7dd8b74d34c3800362 | ["MIT"] | null | null | null | jhipster/jhipster-monolithic/src/main/webapp/app/entities/comment/comment.model.ts | menty44/tutorials | 96c1d20a2136786d4911ed7dd8b74d34c3800362 | ["MIT"] | null | null | null
import {Post} from '../post';
export class Comment {
constructor(
public id?: number,
public text?: string,
public creationDate?: any,
public post?: Post,
) {
}
}
| 17.333333 | 34 | 0.538462 |
a166b46bab2553c6e376cc560b366d67ed401156 | 266 | ts | TypeScript | lib/components/app-sidenav/index.d.ts | mowcixo/ontimize-web-ngx-compiled | a100031f3fc1a50171a60e795b35aa1a26de9a99 | ["Apache-2.0"] | 27 | 2017-12-13T19:21:26.000Z | 2022-01-06T10:15:13.000Z | lib/components/app-sidenav/index.d.ts | mowcixo/ontimize-web-ngx-compiled | a100031f3fc1a50171a60e795b35aa1a26de9a99 | ["Apache-2.0"] | 383 | 2017-09-28T14:14:00.000Z | 2022-03-31T19:06:09.000Z | lib/components/app-sidenav/index.d.ts | mowcixo/ontimize-web-ngx-compiled | a100031f3fc1a50171a60e795b35aa1a26de9a99 | ["Apache-2.0"] | 17 | 2017-09-28T08:46:42.000Z | 2021-02-25T14:46:50.000Z
export * from './image/o-app-sidenav-image.component';
export * from './menu-group/o-app-sidenav-menu-group.component';
export * from './menu-item/o-app-sidenav-menu-item.component';
export * from './o-app-sidenav.component';
export * from './o-app-sidenav.module';
| 44.333333 | 64 | 0.718045 |
38ba6a4ba810998e6e6180d80f4131f1759684f3 | 4,592 | php | PHP | deleteAccounts.php | bradson85/PASCAL | 1d010d9c1f57dbc602742c489c2bd2af09a16485 | ["BSD-3-Clause"] | null | null | null | deleteAccounts.php | bradson85/PASCAL | 1d010d9c1f57dbc602742c489c2bd2af09a16485 | ["BSD-3-Clause"] | null | null | null | deleteAccounts.php | bradson85/PASCAL | 1d010d9c1f57dbc602742c489c2bd2af09a16485 | ["BSD-3-Clause"] | null | null | null
<?php session_start();?>
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<title>Delete Accounts</title>
<!-- Bootstrap core CSS -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/css/bootstrap.min.css" integrity="sha384-/Y6pD6FV/Vv2HJnA6t+vslU6fwYXjCFtcEpHbNJ0lyAFsXTsjBbfaDjzALeQsN6M"
crossorigin="anonymous">
<!-- Custom styles for this template -->
<link rel="stylesheet" href="/css/cssAddWords.css" />
</head>
<body>
<!-- Add Nav Bar part-->
<?php include "topbar-header.php";
include "sidebar-header.php"
?>
<!-- Start main html-->
<main class="col-sm-9 ml-sm-auto col-md-10 pt-3" role="main">
<!-- Alert boxes stuff-->
<?php
include("alertmessages-header.php");
// check get variable for import success
if(isset($_GET["imp"])) {
echo specialMessages($_GET["imp"],"success");
} else{
// check get for import fail
if(isset($_GET["fal"])) {
echo specialMessages($_GET["fal"],"error");
}else{
echo simpleMessages();
}
}
?>
<!-- Modal to ask are you sure if you want to delete-->
<div id ="sure" class="modal fade">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Are You Sure?</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<p>Modal body text goes here.</p>
</div>
<div class="modal-footer">
<button type="button" id="modalsave" class="btn btn-danger">Delete</button>
<button type="button" id ="modalclose "class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<!-- END of Alert box stufff-->
<H1>Delete Accounts</H1>
<!-- Table Part-->
<div class="table-responsive">
<H2>Students</H2>
<table id="student_table" class="table table-striped table-bordered">
<thead>
<tr>
<th>Name</th>
<th>Email</th>
<th> </th>
</tr>
</thead>
<tbody id = "t_body1"></tbody>
</table>
</div>
<div class="table-responsive">
<H2>Teachers</H2>
<table id="teacher_table" class="table table-striped table-bordered">
<thead>
<tr>
<th>Name</th>
<th>Email</th>
<th> </th>
</tr>
</thead>
<tbody id = "t_body2"></tbody>
</table>
</div>
<div class="table-responsive">
<H2>Admins</H2>
<table id="admin_table" class="table table-striped table-bordered">
<thead>
<tr>
<th>Name</th>
<th>Email</th>
<th> </th>
</tr>
</thead>
<tbody id = "t_body3"></tbody>
</table>
</div>
</main>
</div>
</div>
<!-- Optional JavaScript -->
<!-- jQuery first, then Popper.js, then Bootstrap JS Code from bootstrap site -->
<script src="https://code.jquery.com/jquery-3.2.1.min.js" integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.11.0/umd/popper.min.js" integrity="sha384-b/U6ypiBEHpOf/4+1nzFpr53nxSS+GLCkfwBdFNTxtclqqenISfwAzpKaMNFNmj4"
crossorigin="anonymous"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/js/bootstrap.min.js" integrity="sha384-h0AbiXch4ZDo7tp9hKZ4TsHbi047NrKGLO3SEJAg45jXxnGIfYzk4Si90RDIqNm1"
crossorigin="anonymous"></script>
<script src="https://code.jquery.com/qunit/qunit-2.4.1.js"></script>
<script src="/tests/tests.js"></script>
<script>
if (typeof ($.fn.modal) === 'undefined') {
document.write('<script src="/js/bootstrap.min.js"><\/script>')
}
</script>
<script>
window.jQuery || document.write('<script src="/js/jquery-3.2.1.min.js"><\/script>');
</script>
<div id="bootstrapCssTest" class="hidden"></div>
<script>
$(function () {
if ($('#bootstrapCssTest').is(':visible')) {
$("head").prepend('<link rel="stylesheet" href="/css/bootstrap.min.css">');
}
});
</script>
<script src="/js/deleteAccounts.js"></script>
</body>
</html>
| 32.567376 | 190 | 0.574042 |
10daf7dce7303daa0439194d9399e3926761060a | 384 | kt | Kotlin | math/src/main/java/com/kylecorry/sol/math/analysis/CosineWave.kt | kylecorry31/sol | 435041dc5c4c8f868a2ccc24cb4e1eaafe501a15 | ["MIT"] | 1 | 2021-09-24T05:25:57.000Z | 2021-09-24T05:25:57.000Z | math/src/main/java/com/kylecorry/sol/math/analysis/CosineWave.kt | kylecorry31/sol | 435041dc5c4c8f868a2ccc24cb4e1eaafe501a15 | ["MIT"] | 19 | 2021-09-04T21:50:36.000Z | 2021-12-07T00:34:46.000Z | math/src/main/java/com/kylecorry/sol/math/analysis/CosineWave.kt | kylecorry31/sol | 435041dc5c4c8f868a2ccc24cb4e1eaafe501a15 | ["MIT"] | 2 | 2021-11-11T19:51:26.000Z | 2021-12-11T05:32:18.000Z
package com.kylecorry.sol.math.analysis
import kotlin.math.cos
class CosineWave(
override val amplitude: Float,
override val frequency: Float,
override val horizontalShift: Float,
override val verticalShift: Float
) : Waveform {
override fun calculate(x: Float): Float {
return amplitude * cos(frequency * (x - horizontalShift)) + verticalShift
}
}
| 24 | 81 | 0.710938
7952149793404ac86afb1507f2416337f2e1d35d | 5,256 | php | PHP | web/2002-1/390/390_09_BackyardEvildoers.php | dww-circle/sw-d8 | 928881308e9f175d681cd0050deacbb22ecd5bb7 | ["MIT"] | null | null | null | web/2002-1/390/390_09_BackyardEvildoers.php | dww-circle/sw-d8 | 928881308e9f175d681cd0050deacbb22ecd5bb7 | ["MIT"] | 2 | 2021-05-08T22:38:12.000Z | 2022-02-10T17:50:38.000Z | web/2002-1/390/390_09_BackyardEvildoers.php | ISO-tech/sw-d8 | 928881308e9f175d681cd0050deacbb22ecd5bb7 | ["MIT"] | null | null | null
<html>
<head>
<title>
Evildoers in his backyard
</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
<?php include "../../legacy-includes/Script.htmlf" ?>
</head>
<body bgcolor="#FFFFCC" text="000000" link="990000" vlink="660000" alink="003366" leftmargin="0" topmargin="0">
<table width="744" cellspacing="0" cellpadding="0" border="0">
<tr><td width="474"><a name="Top"></a><?php include "../../legacy-includes/TopLogo.htmlf" ?></td>
<td width="270"><?php include "../../legacy-includes/TopAd.htmlf" ?>
</td></tr></table>
<table width="744" cellspacing="0" cellpadding="0" border="0">
<tr><td width="18" bgcolor="FFCC66"></td>
<td width="108" bgcolor="FFCC66" valign=top><?php include "../../legacy-includes/LeftButtons.htmlf" ?></td>
<td width="18"></td>
<td width="480" valign="top">
<?php include "../../legacy-includes/BodyInsert.htmlf" ?>
<P><font face="Arial, Helvetica, sans-serif" size="2"><b>READING BETWEEN THE LINES</b></font><BR>
<font face="Times New Roman, Times, serif" size="5"><b>Evildoers in his backyard</b></font></P>
<P><font face="Times New Roman, Times, serif" size="2"><b>By Lance Selfa</b></font><font face="Arial, Helvetica, sans-serif" size="2"> | January 18, 2002 | Page 8</font></P>
<font face="Times New Roman, Times, serif" size="3"><P>THE U.S. government's handling of last year's anthrax attacks in Washington and New York said a lot about Washington's priorities.</P>
<P>While the House of Representatives evacuated the Capitol and authorities made sure to vaccinate "essential personnel"--from President Bush to Capitol Police dogs--postal workers were told to remain at their jobs. As a result, two postal workers were the only ones killed by anthrax in D.C.</P>
<P>Three months later, the investigation into the anthrax terror reveals much the same about Washington's real priorities. In October, a parade of "terrorism experts" filled editorial pages with assertions that the attacks bore the fingerprints of al-Qaeda or Iraq. Bush said he "wouldn't put it past" bin Laden or Saddam Hussein to try to kill Americans with anthrax.</P>
<P>The case against Saddam defied logic. Why would the Iraqi government sponsor an attack that would hand the Texecutioner its own death warrant and lose any anti-sanctions support it had gained?</P>
<P>Most experts without an ax to grind said the attacks bore the hallmarks of domestic terrorists. But few in Washington were listening.</P>
<P>When bioweapons expert Barbara Hatch Rosenberg contended that the attacks most likely originated in a U.S. government laboratory, it was reported in a German environmentalist magazine in November. By late December, even the Bush administration admitted a domestic culprit was the most likely suspect.</P>
<P>The focus on foreign "evildoers" had its uses for Bush and Co., however. At the height of the anthrax hysteria, Congress pushed through the USA PATRIOT Act. </P>
<P>The focus on Saddam Hussein also diverted attention from what should have been a bombshell revelation. The U.S. military admitted to manufacturing weapons of mass destruction it said it had stopped producing in 1969. In fact, the U.S. Army acknowledged that it had manufactured "weaponized" anthrax since 1992.</P>
<P>What's more, the genetic structure of the anthrax found in the letters to Sens. Tom Daschle and Patrick Leahy matched the strain developed in U.S. labs. Yet a government that quickly rounded up more than 1,000 immigrants--most of whom have yet to be charged with crimes--has moved noticeably slower to apprehend the anthrax terrorists.</P>
<P>There are political reasons for the slow response. The likely suspects come from one of two places--a government weapons lab or the swamps of the far right.</P>
<P>An investigation of U.S. weapons labs would open a real can of worms. How would Bush and Co. answer enraged Americans demanding to know why the U.S. created weapons used against them?</P>
<P>On the other hand, a look at far-right outfits in the U.S. might have brought the feds a little too close for comfort for the likes of Attorney General John Ashcroft. Ashcroft received $26,500 for his failed 2000 presidential campaign from the executives of AmeriVision, a Christian Right version of the liberal Working Assets long-distance phone service, according to a <I>Salon</I> investigation. </P>
<P>Besides Ashcroft's presidential campaign, AmeriVision funds Prisoners of Christ, a support organization for anti-abortion zealots jailed for bombing clinics and murdering abortion providers. At its annual fundraising banquet, Prisoners of Christ auctions such souvenirs as the watch that timed a firebomb at an abortion provider.</P>
<P>The government has shut down several major Islamic charities on suspicion--not proof--of connections to Islamic fundamentalist groups. But don't expect action against the likes of Prisoners of Christ.</P>
<P>It's one thing to wage a "war on terrorism" against overseas or immigrant "evildoers." It's another to go after your campaign contributors.</P>
<?php include "../../legacy-includes/BottomNavLinks.htmlf" ?>
<td width="12"></td>
<td width="108" valign="top">
<?php include "../../legacy-includes/RightAdFolder.htmlf" ?>
</td>
</tr>
</table>
</body>
</html>
| 87.6 | 406 | 0.756088 |
ce5a8621f9ca519c327f54da256ef1ebeaaa12e5 | 2,089 | kt | Kotlin | permissionhelper/src/main/java/com/midsizemango/permissionhelper/Permissions.kt | MidsizeMango/Permissions-Handler | 26f1f42db372fef811636bcbea43ff5d4ada494d | ["MIT"] | 1 | 2018-11-25T11:04:04.000Z | 2018-11-25T11:04:04.000Z | permissionhelper/src/main/java/com/midsizemango/permissionhelper/Permissions.kt | MidsizeMango/Permissions-Handler | 26f1f42db372fef811636bcbea43ff5d4ada494d | ["MIT"] | null | null | null | permissionhelper/src/main/java/com/midsizemango/permissionhelper/Permissions.kt | MidsizeMango/Permissions-Handler | 26f1f42db372fef811636bcbea43ff5d4ada494d | ["MIT"] | null | null | null
package com.midsizemango.permissionhelper
import android.Manifest
/**
* Created by prasad on 7/16/17.
*/
object Permissions {
val GROUP_CAMERA = Manifest.permission_group.CAMERA
val GROUP_CONTACTS = Manifest.permission_group.CONTACTS
val GROUP_CALENDAR = Manifest.permission_group.CALENDAR
val GROUP_SMS = Manifest.permission_group.SMS
val GROUP_MICROPHONE = Manifest.permission_group.MICROPHONE
val GROUP_PHONE = Manifest.permission_group.PHONE
val GROUP_SENSORS = Manifest.permission_group.SENSORS
val GROUP_LOCATION = Manifest.permission_group.LOCATION
val GROUP_STORAGE = Manifest.permission_group.STORAGE
val CAMERA = Manifest.permission.CAMERA
val READ_EXTERNAL_STORAGE = Manifest.permission.READ_EXTERNAL_STORAGE
val WRITE_EXTERNAL_STORAGE = Manifest.permission.WRITE_EXTERNAL_STORAGE
val READ_CALENDAR = Manifest.permission.READ_CALENDAR
val WRITE_CALENDAR = Manifest.permission.WRITE_CALENDAR
val READ_CONTACTS = Manifest.permission.READ_CONTACTS
val WRITE_CONTACTS = Manifest.permission.WRITE_CONTACTS
val GET_ACCOUNTS = Manifest.permission.GET_ACCOUNTS
val ACCESS_FINE_LOCATION = Manifest.permission.ACCESS_FINE_LOCATION
val ACCESS_COARSE_LOCATION = Manifest.permission.ACCESS_COARSE_LOCATION
val READ_PHONE_STATE = Manifest.permission.READ_PHONE_STATE
val CALL_PHONE = Manifest.permission.CALL_PHONE
val READ_CALL_LOG = Manifest.permission.READ_CALL_LOG
val WRITE_CALL_LOG = Manifest.permission.WRITE_CALL_LOG
val ADD_VOICEMAIL = Manifest.permission.ADD_VOICEMAIL
val USE_SIP = Manifest.permission.USE_SIP
val PROCESS_OUTGOING_CALLS = Manifest.permission.PROCESS_OUTGOING_CALLS
val SEND_SMS = Manifest.permission.SEND_SMS
val RECEIVE_SMS = Manifest.permission.RECEIVE_SMS
val READ_SMS = Manifest.permission.READ_SMS
val RECEIVE_WAP_PUSH = Manifest.permission.RECEIVE_WAP_PUSH
val RECEIVE_MMS = Manifest.permission.RECEIVE_MMS
val RECORD_AUDIO = Manifest.permission.RECORD_AUDIO
val BODY_SENSORS = Manifest.permission.BODY_SENSORS
}
| 40.173077 | 75 | 0.805649
f43faab60a3e52ff9a7afde341c433a0e73f6b7a | 6,580 | ts | TypeScript | storage/src/mem/key_value_store_mem.ts | formuladb/poc2 | 6aff67fd217e6193efcdd97d5ba34193f8ae8254 | ["MIT"] | 1 | 2022-02-05T16:07:42.000Z | 2022-02-05T16:07:42.000Z | storage/src/mem/key_value_store_mem.ts | formuladb/poc2 | 6aff67fd217e6193efcdd97d5ba34193f8ae8254 | ["MIT"] | null | null | null | storage/src/mem/key_value_store_mem.ts | formuladb/poc2 | 6aff67fd217e6193efcdd97d5ba34193f8ae8254 | ["MIT"] | null | null | null
/**
* © 2018 S.C. FORMULA DATABASE S.R.L.
* License TBD
*/
import { RangeQueryOptsI, KeyValueStoreFactoryI, KeyValueStoreI, KeyObjStoreI, kvsKey2Str, KeyTableStoreI, ScalarType, kvsReduceValues } from "@storage/key_value_store_i";
import * as _ from "lodash";
import { KeyValueObj, KeyValueError, KeyValueObjIdType, _idAsStr } from "@domain/key_value_obj";
import { ReduceFunDefaultValue, SumReduceFunN, CountReduceFunN, TextjoinReduceFunN, ReduceFun } from "@domain/metadata/reduce_functions";
import { Entity, Schema } from "@domain/metadata/entity";
import { Expression } from "jsep";
import { evalExpression } from "@functions/map_reduce_utils";
import { SimpleAddHocQueryFilterItem, AggFunc, SimpleAddHocQuery } from "@domain/metadata/simple-add-hoc-query";
import { MetadataStore } from "@storage/metadata-store";
import { GitStorageMem } from "@storage/git-storage-mem";
import { simpleAdHocQueryOnArrayOfOBjects } from "@core/simple-add-hoc-query";
function simulateIO<T>(x: T): Promise<T> {
return new Promise(resolve => setTimeout(() => resolve(x), Math.random() * 10));
}
/**
* Key Value Store with optimistic locking functionality
*/
export class KeyValueStoreMem<VALUET> implements KeyValueStoreI<VALUET> {
protected db: { [x: string]: VALUET } = {};
getDB() {return this.db}
length() {return Object.keys(this.db).length}
constructor() {
}
public async close() {}
public get(_id: KeyValueObjIdType): Promise<VALUET> {
return simulateIO(_.cloneDeep(this.db[_idAsStr(_id)]));
}
/** querying a map-reduce view must return the results ordered by _id */
public rangeQueryWithKeys(opts: RangeQueryOptsI): Promise<{ _id: KeyValueObjIdType, val: VALUET }[]> {
let ret = _.entries(this.db).filter(([_id, val]) =>
(opts.startkey < _id && _id < opts.endkey)
|| (opts.inclusive_start && _id === opts.startkey)
|| (opts.inclusive_end && _id === opts.endkey)
)
.sort(([keyA, valA], [keyB, valB]) => {
if (keyA < keyB) return -1;
if (keyA > keyB) return 1;
return 0;
})
.map(([_id, val]) => ({ _id: _id, val: val }));
return simulateIO(ret);
}
public rangeQuery(opts: RangeQueryOptsI): Promise<VALUET[]> {
return this.rangeQueryWithKeys(opts)
.then(res => res.map(({ _id, val }) => val));
}
public set(_id: KeyValueObjIdType, obj: VALUET): Promise<VALUET> {
this.db[_idAsStr(_id)] = _.cloneDeep(obj);
return this.get(_id);
}
public async del(_id: KeyValueObjIdType): Promise<VALUET> {
let ret = await this.get(_id);
delete this.db[_idAsStr(_id)];
return simulateIO(ret);
}
public async clearDB() {
this.db = {};
}
all(): Promise<VALUET[]> {
return simulateIO(Object.values(this.db));
}
public info(): Promise<string> {
return simulateIO("in memory test KVS");
}
}
export class KeyObjStoreMem<OBJT extends KeyValueObj> extends KeyValueStoreMem<OBJT> implements KeyObjStoreI<OBJT> {
public findByPrefix(prefix: string): Promise<OBJT[]> {
return this.rangeQuery({ startkey: prefix, endkey: prefix + "\ufff0", inclusive_start: true, inclusive_end: false });
}
public put(obj: OBJT): Promise<OBJT> {
return this.set(obj._id, obj);
}
public putBulk(objs: OBJT[]): Promise<(OBJT | KeyValueError)[]> {
//naive implementation, some databases have specific efficient ways to to bulk insert
return Promise.all(objs.map(o => this.set(o._id, o)));
}
public delBulk(objs: OBJT[]): Promise<(OBJT | KeyValueError)[]> {
//naive implementation, some databases have specific efficient ways to to bulk delete
return Promise.all(objs.map(o => this.del(o._id)));
}
}
export class KeyTableStoreMem<OBJT extends KeyValueObj> extends KeyObjStoreMem<OBJT> implements KeyTableStoreI<OBJT> {
constructor(private schema: Schema, public entity: Entity) {
super();
}
init(): Promise<any> {
return Promise.resolve(); //no-op
}
async updateEntity(entity: Entity) {
}
public async simpleAdHocQuery(query: SimpleAddHocQuery): Promise<any[]> {
//First we filter the rows
let objects: any[] = Object.values(this.db);
let groupedFiltered = simpleAdHocQueryOnArrayOfOBjects(query, objects);
return simulateIO(groupedFiltered);
}
mapQuery(keyExpr: Expression[], opts: RangeQueryOptsI): Promise<OBJT[]> {
let ret = _.entries(this.db).map(([_id, x]) => {
return [kvsKey2Str(evalExpression(x, keyExpr)), x];
}).filter(([key, val]) =>
(opts.startkey < key && key < opts.endkey)
|| (opts.inclusive_start && key === opts.startkey)
|| (opts.inclusive_end && key === opts.endkey)
)
.sort(([keyA, valA], [keyB, valB]) => {
if (keyA < keyB) return -1;
if (keyA > keyB) return 1;
return 0;
})
.map(([_id, val]) => val as OBJT);
return simulateIO(ret);
}
reduceQuery(keyExpr: Expression[], opts: RangeQueryOptsI, valueExpr: Expression, reduceFun: ReduceFun): Promise<ScalarType> {
return this.mapQuery(keyExpr, opts)
.then(rows => rows.map(r => evalExpression(r, valueExpr)))
.then(values => kvsReduceValues(values, reduceFun, this.entity._id, false));
}
}
export class KeyValueStoreFactoryMem implements KeyValueStoreFactoryI {
readonly type = "KeyValueStoreFactoryMem";
metadataStore = new MetadataStore('mem', this);
createKeyValS<VALUET>(name: string, desc: string, valueExample: VALUET): KeyValueStoreI<VALUET> {
return new KeyValueStoreMem<VALUET>();
}
createKeyObjS<OBJT extends KeyValueObj>(name: string): KeyObjStoreI<OBJT> {
return new KeyObjStoreMem<OBJT>();
}
createKeyTableS<OBJT extends KeyValueObj>(schema: Schema, entity: Entity): KeyTableStoreI<OBJT> {
return new KeyTableStoreMem<OBJT>(schema, entity);
}
async executeBatch(callback: () => Promise<any>): Promise<void>{
await callback();
}
async clearAllForTestingPurposes() {
// Mem KV store is ephemeral so nothing to clear
};
public async close() {}
}
| 37.6 | 172 | 0.619149 |
4419ff8b97d4973d38305f9d1db86688eb0ee98f | 158 | py | Python | positive.py | rahul-samal/positive-no-in-a-range | 214e9bc7c31c1e3a72fc25f895914a14842d0f69 | ["MIT"] | null | null | null | positive.py | rahul-samal/positive-no-in-a-range | 214e9bc7c31c1e3a72fc25f895914a14842d0f69 | ["MIT"] | null | null | null | positive.py | rahul-samal/positive-no-in-a-range | 214e9bc7c31c1e3a72fc25f895914a14842d0f69 | ["MIT"] | null | null | null
list1=[12,-7,5,64,-14]
for num in list1:
if num>=0:
print(num,end=" ")
list2=[12,14,-95,3]
for num in list2:
if num>=0:
print(num,end=" ")
| 17.555556 | 24 | 0.537975 |
e5a931cd8c54c15bcc847eec82f7138b77b05af8 | 585 | kt | Kotlin | ecommerce-cart/src/test/kotlin/com/kotato/context/ecommerce/modules/cart/stub/CartCheckedOutEventStub.kt | anjeludo/axon-examples | a3a55431b0de2446c1e0058e22ba68d6baf260bc | ["MIT"] | 20 | 2018-02-20T23:48:02.000Z | 2022-01-21T07:56:58.000Z | ecommerce-cart/src/test/kotlin/com/kotato/context/ecommerce/modules/cart/stub/CartCheckedOutEventStub.kt | anjeludo/axon-examples | a3a55431b0de2446c1e0058e22ba68d6baf260bc | ["MIT"] | 1 | 2018-02-21T09:28:06.000Z | 2018-02-21T09:28:06.000Z | ecommerce-cart/src/test/kotlin/com/kotato/context/ecommerce/modules/cart/stub/CartCheckedOutEventStub.kt | anjeludo/axon-examples | a3a55431b0de2446c1e0058e22ba68d6baf260bc | ["MIT"] | 24 | 2018-02-19T20:06:13.000Z | 2022-03-14T00:28:13.000Z
package com.kotato.context.ecommerce.modules.cart.stub
import com.kotato.context.ecommerce.modules.cart.domain.checkout.CartCheckedOutEvent
import com.kotato.context.ecommerce.modules.order.stub.OrderIdStub
import java.time.ZonedDateTime
class CartCheckedOutEventStub {
companion object {
fun random(aggregateId: String = CartIdStub.random().asString(),
occurredOn: ZonedDateTime = ZonedDateTime.now(),
orderId: String = OrderIdStub.random().asString()) =
CartCheckedOutEvent(aggregateId, occurredOn, orderId)
}
}
| 41.785714 | 84 | 0.724786
66339ce83f38e414fa9f11177e154a8677ed132f | 8,041 | py | Python | foods3/direct_corn_supply_chain.py | taegon/spatial-scale-lca-us-corn | c76e8477a222e98ff1c28332006447d45b12960f | ["CNRI-Python"] | null | null | null | foods3/direct_corn_supply_chain.py | taegon/spatial-scale-lca-us-corn | c76e8477a222e98ff1c28332006447d45b12960f | ["CNRI-Python"] | null | null | null | foods3/direct_corn_supply_chain.py | taegon/spatial-scale-lca-us-corn | c76e8477a222e98ff1c28332006447d45b12960f | ["CNRI-Python"] | null | null | null
import csv
import os
import numpy as np
from foods3 import util
from gurobipy import *
county_size = 3109
def optimize_gurobi(supply_code, supply_corn, demand_code, demand_corn, dist_mat):
env = Env("gurobi_spatial_lca.log")
model = Model("lp_for_spatiallca")
var = []
# add constraint for corn product
# all flow value bigger than equals 0
no_of_supply = len(supply_code)
no_of_demand = len(demand_code)
var = []
sol = np.zeros(no_of_supply * no_of_demand)
for i, vs in enumerate(supply_code):
for j, vd in enumerate(demand_code):
var.append(model.addVar(0.0, min(supply_corn[i], demand_corn[j]), 0.0, GRB.CONTINUOUS, "S_s[{:d},{:d}]".format(i, j)))
model.update()
print("corn flow constraint = all number positive")
# Set objective: minimize cost
expr = LinExpr()
for i, vs in enumerate(supply_code):
for j, vd in enumerate(demand_code):
expr.addTerms(dist_mat[i][j], var[i * no_of_demand + j])
model.setObjective(expr, GRB.MINIMIZE)
# sum of supply(specific row's all columns) is small than product of corn
# Add constraint
for i, vs in enumerate(supply_code):
expr = LinExpr()
for j, vd in enumerate(demand_code):
expr.addTerms(1.0, var[i * no_of_demand + j])
model.addConstr(expr, GRB.LESS_EQUAL, supply_corn[i], "c{:d}".format(i + 1))
print("sum of corn flow from specific county smaller than total product of that county")
# sum of supply (specific column's all row) is equals to the demand of county
for j, vd in enumerate(demand_code):
expr = LinExpr()
for i, vs in enumerate(supply_code):
expr.addTerms(1.0, var[i * no_of_demand + j])
model.addConstr(expr, GRB.EQUAL, demand_corn[j], "d{:d}".format(j + 1))
print("all constraints are set.")
# Optimize model
model.optimize()
for i, vs in enumerate(supply_code):
for j, vd in enumerate(demand_code):
sol[i * no_of_demand + j] = var[i * no_of_demand + j].x
return sol
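# For reference, the linear program assembled above is the classic transportation
# problem. This is a sketch of the intended math, written from the constraints
# built in this function (x_ij denotes the corn flow from supply county i to
# demand entry j, d_ij the county-to-county distance):
#
#   minimize    \sum_{i,j} d_{ij} x_{ij}
#   subject to  \sum_j x_{ij} \le supply_i              for every supply county i
#               \sum_i x_{ij}  =  demand_j              for every demand entry j
#               0 \le x_{ij} \le \min(supply_i, demand_j)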
def read_csv_int(filename, col_idx):
values = []
with open(filename, "r", encoding='utf-8') as f:
csv_reader = csv.reader(f)
next(csv_reader)
for row in csv_reader:
v = row[col_idx]
values.append(int(v))
return values
def read_csv_float(filename, col_idx):
values = []
with open(filename, "r", encoding='utf-8') as f:
csv_reader = csv.reader(f)
next(csv_reader)
for row in csv_reader:
v = row[col_idx]
v = v.replace(",", "")
# print(v)
if v is None or v == "" or v.strip() == "-":
values.append(0)
else:
values.append(float(v))
return values
def read_csv_float_range(filename, col_idx, col_idx_end):
values = []
with open(filename, "r", encoding='utf-8') as f:
csv_reader = csv.reader(f)
next(csv_reader)
for row in csv_reader:
sum_value = 0.
for col in range(col_idx, col_idx_end):
v = row[col]
v = v.replace(",", "")
if v is None or v == "" or v.strip() == "-":
v = 0
else:
v = float(v)
sum_value += v
values.append(sum_value)
return values
def read_dist_matrix(filename):
matrix = np.zeros((county_size, county_size))
with open(filename, "r") as f:
csv_reader = csv.reader(f)
for i, row in enumerate(csv_reader):
for c in range(county_size):
matrix[i][c] = float(row[c])
return matrix
def expand_list(corn_demand_file, input_file, output_file):
demand = {}
with open(corn_demand_file, "r") as f:
reader = csv.reader(f)
next(reader)
for row in reader:
demand[row[0]] = [float(row[8]), float(row[9]), float(row[10]),
float(row[11]), float(row[12]), float(row[13]),
float(row[7])]
sub_sector = ["layer", "pullet", "turkey", "milkcow", "wetmill", "export", "others"]
data_list = []
with open(input_file, "r") as f:
reader = csv.reader(f)
header = next(reader)
for row in reader:
data_list.append(row)
expanded_list = []
for row in data_list:
if row[0] == "others":
weighted_col_idx = [3,]
target_county = row[1]
total_demand = sum(demand[target_county])
for ss in range(len(sub_sector)):
if total_demand == 0:
weight = 1
else:
weight = demand[target_county][ss] / total_demand
split_row = [row[x] if x not in weighted_col_idx else float(row[x])*weight for x in range(len(row))]
split_row[0] = sub_sector[ss]
if split_row[3] != 0:
expanded_list.append(split_row)
else:
expanded_list.append(row)
with open(output_file, "w") as f:
f.write(",".join(header))
f.write("\n")
for row in expanded_list:
f.write(",".join([str(x) for x in row]))
f.write("\n")
def main(output_filename, demand_filename):
county_code = read_csv_int("../input/county_FIPS.csv", 0)
supply_code = county_code[:]
supply_amount = read_csv_float(demand_filename, 1)
demand_code = []
for i in range(5):
demand_code.extend(county_code)
demand_amount = []
# cattle(0), poultry(1), ethanol(2), hog(3), others(4)
demand_amount.extend(read_csv_float(demand_filename, 3))
demand_amount.extend(read_csv_float(demand_filename, 5))
demand_amount.extend(read_csv_float(demand_filename, 6))
demand_amount.extend(read_csv_float(demand_filename, 4))
demand_amount.extend(read_csv_float_range(demand_filename, 7, 14))
print(sum(supply_amount))
print(sum(demand_amount))
all_imp_filename = "../input/allDist_imp.csv"
dist_imp_all_matrix = read_dist_matrix(all_imp_filename)
dist_mat = np.zeros((len(supply_code), len(demand_code)))
print("making distance matrix")
dist_mat[0:3109, 0 + 0 * 3109:3109 * 1] = dist_imp_all_matrix
dist_mat[0:3109, 0 + 1 * 3109:3109 * 2] = dist_imp_all_matrix
dist_mat[0:3109, 0 + 2 * 3109:3109 * 3] = dist_imp_all_matrix
dist_mat[0:3109, 0 + 3 * 3109:3109 * 4] = dist_imp_all_matrix
dist_mat[0:3109, 0 + 4 * 3109:3109 * 5] = dist_imp_all_matrix
print("run simulation model")
sol = optimize_gurobi(supply_code, supply_amount, demand_code, demand_amount, dist_mat)
no_of_demand = len(demand_code)
sector_name = ("cattle", "broiler", "ethanol", "hog", "others")
with open(output_filename, "w") as f:
headline = [
"sector", "demand_county", "corn_county", "corn_bu",
]
f.write(",".join(headline))
f.write("\n")
for i, v in enumerate(sol):
if v > 0:
sector = (i % no_of_demand) // county_size
src_county_idx = i // no_of_demand
des_county_idx = i % no_of_demand % county_size
supply_corn_bu = v
src_county_fips = county_code[src_county_idx]
des_county_fips = county_code[des_county_idx]
f.write("{},{},{},{}\n".format(sector_name[sector], des_county_fips, src_county_fips, supply_corn_bu))
if __name__ == '__main__':
ROOT_DIR = util.get_project_root()
output_dir = ROOT_DIR / "output"
if not os.path.exists(output_dir):
os.mkdir(output_dir)
corn_flow_filename = "../output/corn_flow_county_scale_major_category.csv"
corn_demand_filename = "../input/corn_demand_2012.csv"
main(corn_flow_filename, corn_demand_filename)
expand_list(corn_demand_filename,
corn_flow_filename,
"../output/impacts_scale_county_all_category.csv")
| 33.644351 | 130 | 0.596443 |
1a636e14697518b751cb3627dbe79d20cd05a390 | 420 | py | Python | Fundamentos de Algoritmos - Python/adivinhador/teste3.py | JuanLiraEst/Labs_Atividades_FEI | a73b31a4b5ebb87de02034b90fbb430afbb7a981 | ["MIT"] | null | null | null | Fundamentos de Algoritmos - Python/adivinhador/teste3.py | JuanLiraEst/Labs_Atividades_FEI | a73b31a4b5ebb87de02034b90fbb430afbb7a981 | ["MIT"] | null | null | null | Fundamentos de Algoritmos - Python/adivinhador/teste3.py | JuanLiraEst/Labs_Atividades_FEI | a73b31a4b5ebb87de02034b90fbb430afbb7a981 | ["MIT"] | null | null | null
lista = [4,14,24,34,44]
q4 = int(input("a soma dos dois dígitos é igual a?"))
numero_final = []
for x in range(len(lista)):
valor = str(lista[x])
dig_um = int(valor[0])
if lista[x]>9:
dig_dois = int(valor[1])
if lista[x]<10:
if dig_um== q4:
numero_final.append(lista[x])
elif dig_um + dig_dois == q4:
numero_final.append(lista[x])
print(numero_final)
| 21 | 53 | 0.57381
79540e5a8c528eb6be4a0ce92bec4ca21b894303 | 59,672 | php | PHP | commonfunc.php | bee7813993/KaraokeRequestorWeb | 157e789ba259ffb240e807d7089ce9b6404d4df0 | ["Apache-2.0"] | 8 | 2016-11-16T16:29:25.000Z | 2021-05-05T17:10:53.000Z | commonfunc.php | bee7813993/KaraokeRequestorWeb | 157e789ba259ffb240e807d7089ce9b6404d4df0 | ["Apache-2.0"] | 107 | 2015-01-25T15:41:00.000Z | 2021-12-12T08:56:18.000Z | commonfunc.php | bee7813993/KaraokeRequestorWeb | 157e789ba259ffb240e807d7089ce9b6404d4df0 | ["Apache-2.0"] | 3 | 2015-05-03T01:16:21.000Z | 2022-02-11T16:32:23.000Z
<?php
require_once 'kara_config.php';
require_once 'prioritydb_func.php';
//require_once("getid3/getid3.php");
$showsonglengthflag = 0;
$user='normal';
if (isset($_SERVER['PHP_AUTH_USER'])){
if ($_SERVER['PHP_AUTH_USER'] === 'admin'){
// print '管理者ログイン中<br>';
$user=$_SERVER['PHP_AUTH_USER'];
}
}
if (isset($_SERVER) && isset($_SERVER["SERVER_ADDR"]) ){
//var_dump($_SERVER);
$everythinghost = $_SERVER["SERVER_ADDR"];
$count_semi = substr_count($everythinghost, ':');
$count_dot = substr_count($everythinghost, '.');
if($count_semi > 0 && $count_dot == 0) {
$everythinghost = addipv6blanket($everythinghost);
}
} else {
$everythinghost = 'localhost';
}
function addipv6blanket($ipv6addr){
if( (mb_substr($ipv6addr,0,1) == '[' ) and (mb_substr($ipv6addr,-1,1) == ']' ) ) {
return $ipv6addr;
} else {
return '['.$ipv6addr.']';
}
}
/* Whether the bingo feature is enabled */
$usebingo=false;
if(array_key_exists("usebingo",$config_ini)){
if($config_ini["usebingo"]==1 ){
$usebingo=true;
}
}
/**
* createUri
 * Returns an absolute URL built from a relative path
 *
 * @param string $base base URL (absolute URL)
 * @param string $relational_path relative path
 * @return string absolute URL resolved from the relative path
* @link http://blog.anoncom.net/2010/01/08/295.html/comment-page-1
*/
function createUri( $base, $relationalPath )
{
$parse = array(
"scheme" => null,
"user" => null,
"pass" => null,
"host" => null,
"port" => null,
"query" => null,
"fragment" => null
);
$parse = parse_url( $base );
//var_dump($parse);
if( strpos($parse["path"], "/", (strlen($parse["path"])-1)) !== false ){
$parse["path"] .= ".";
}
if( preg_match("#^https?://#", $relationalPath) ){
return $relationalPath;
}else if( preg_match("#^/.*$#", $relationalPath) ){
return $parse["scheme"] . "://" . $parse["host"] . $relationalPath;
}else{
$basePath = explode("/", str_replace('\\', '/', dirname($parse["path"])));
//var_dump($basePath);
if(empty($basePath[1] )) {
unset($basePath[1]);
}
$relPath = explode("/", $relationalPath);
//var_dump($relPath);
foreach( $relPath as $relDirName ){
if( $relDirName == "." ){
array_shift( $basePath );
array_unshift( $basePath, "" );
}else if( $relDirName == ".." ){
array_pop( $basePath );
if( count($basePath) == 0 ){
$basePath = array("");
}
}else{
array_push($basePath, $relDirName);
}
}
//var_dump($basePath);
$path = implode("/", $basePath);
//print $path;
return $parse["scheme"] . "://" . $parse["host"] . $path;
}
}
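/*
 * Illustrative check of createUri() resolution. This is a sketch only: the
 * example.com URLs below are hypothetical and the demo_ helper is not part of
 * the original code base; call it manually to verify the behaviour.
 */
function demo_createUri()
{
    // "../img/logo.png" climbs one directory above /a/b/index.php.
    var_dump(createUri('http://example.com/a/b/index.php', '../img/logo.png')); // "http://example.com/a/img/logo.png"
    // A bare file name resolves inside the trailing-slash directory.
    var_dump(createUri('http://example.com/a/b/', 'page.php')); // "http://example.com/a/b/page.php"
}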
function is_valid_url($url)
{
return false !== filter_var($url, FILTER_VALIDATE_URL) && preg_match('@^https?+://@i', $url);
}
function file_get_html_with_retry($url, $retrytimes = 5, $timeoutsec = 1, $ipvar = 4){
$errno = 0;
for($loopcount = 0 ; $loopcount < $retrytimes ; $loopcount ++){
$ch=curl_init($url);
curl_setopt($ch, CURLOPT_HEADER, false);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_USERAGENT, "Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko");
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, $timeoutsec);
curl_setopt($ch, CURLOPT_TIMEOUT, $timeoutsec);
curl_setopt($ch, CURLOPT_FAILONERROR, true);
        //Follow redirects
        //Follow the Location header
        curl_setopt($ch,CURLOPT_FOLLOWLOCATION,true);
        //Maximum number of redirects to follow
        curl_setopt($ch,CURLOPT_MAXREDIRS,4);
        //Automatically send the Referer header when following a redirect
curl_setopt($ch,CURLOPT_AUTOREFERER,true);
if($ipvar == 6){
curl_setopt($ch, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V6 );
}else if($ipvar == 4){
curl_setopt($ch, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4 );
}else{
curl_setopt($ch, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4 );
}
$contents = curl_exec($ch);
//var_dump($contents); //debug
if( $contents !== false) {
curl_close($ch);
break;
}
$errno = curl_errno($ch);
// print $timeoutsec;
curl_close($ch);
}
if ($loopcount === $retrytimes) {
$error_message = curl_strerror($errno);
#throw new ErrorException( 'http connection error : '.$error_message . ' url : ' . $url . "\n");
}
return $contents;
}
/** Replace fuzzy/ambiguous characters in a search word with spaces
 */
function replace_obscure_words($word)
{
    // Remove parenthesised parts "/[ ]*\(.*?\)[ ]*/u";
    $resultwords = preg_replace("/[ ]*\(.*?\)[ ]*/u",' ',$word);
    // List of fuzzy/ambiguous characters
    /*** Moved to the external file ignorecharlist.txt
$obscure_list = array(
"★",
"☆",
"?",
"?",
"×",
"!",
"!",
':',
':',
'~',
'・',
'*',
'_',
'"',
'&'
);
***/
$obscure_list = file ( "ignorecharlist.txt" , FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES );
    // Replace the ambiguous characters with a space
    $resultwords = str_replace($obscure_list,' ',$resultwords);
    // Strip a trailing space if present
    $resultwords = rtrim($resultwords);
    // If the word is 6 bytes or shorter, wrap it in quotes (currently disabled)
if(strlen($word) <= 6){
//$resultwords = '"'.$resultwords.'"';
}
return $resultwords;
}
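/*
 * Example of what replace_obscure_words() does (a sketch; it assumes
 * ignorecharlist.txt sits next to this script and lists characters such as ★):
 *
 *   replace_obscure_words('曲名★(off vocal)');
 *   // the "(off vocal)" part is dropped, ★ becomes a space, and the trailing
 *   // space is trimmed  =>  '曲名'
 */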
/**
 * Format a byte count as a human-readable string
* @param integer $bytes
* @param integer $precision
* @param array $units
*/
function formatBytes($bytes, $precision = 2, array $units = null)
{
if ( abs($bytes) < 1024 )
{
$precision = 0;
}
if ( is_array($units) === false )
{
$units = array('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB');
}
if ( $bytes < 0 )
{
$sign = '-';
$bytes = abs($bytes);
}
else
{
$sign = '';
}
$exp = floor(log($bytes) / log(1024));
    $exp = 2; // fixed to MB
$unit = $units[$exp];
$bytes = $bytes / pow(1024, floor($exp));
$bytes = sprintf('%.'.$precision.'f', $bytes);
return $sign.$bytes.' '.$unit;
}
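/*
 * Illustrative use of formatBytes() (sketch only; demo_formatBytes() is not
 * part of the original code base). Note that the unit is always MB because
 * $exp is overwritten with 2 inside the function.
 */
function demo_formatBytes()
{
    var_dump(formatBytes(2560000));  // "2.44 MB"
    var_dump(formatBytes(-2560000)); // "-2.44 MB"
}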
// Build the priority table: annotate every file found by the search with its priority
// (the Everything result list and the priority rows are passed in explicitly because
// they are not visible inside this function's scope), then return the annotated list.
function get_prioritytable($rearchedlist, $prioritylist)
{
    $rearchedlist_addpriority = array();
    foreach($rearchedlist["results"] as $k=>$v){
        $onefileinfo = array();
        $onefileinfo += array('path' => $v['path']);
        $onefileinfo += array('name' => $v['name']);
        $onefileinfo += array('size' => $v['size']);
        $c_priority = -1;
        foreach($prioritylist as $pk=>$pv){
            $searchres = false;
            if($pv['kind'] == 2 ) {
                $searchres = mb_strstr($v['name'],$pv['priorityword']);
            }else{
                $searchres = mb_strstr($v['path'],$pv['priorityword']);
            }
            if ( $searchres != false ){
                if($c_priority < $pv['prioritynum'] ){
                    $c_priority = $pv['prioritynum'];
                }
            }
        }
        if($c_priority == -1) $c_priority = 50;
        $onefileinfo += array('priority' => $c_priority);
        $rearchedlist_addpriority[] = $onefileinfo ;
    }
    return $rearchedlist_addpriority;
}
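/*
 * Minimal sketch of calling get_prioritytable() with hypothetical data; the
 * array shapes follow the Everything JSON results and the priority DB rows
 * used elsewhere in this file, but the concrete values are made up and the
 * demo_ helper is not part of the original code base.
 */
function demo_get_prioritytable()
{
    $rearchedlist = array('results' => array(
        array('path' => 'D:\\karaoke\\anime', 'name' => 'song_off_vocal.mp4', 'size' => 12345678),
    ));
    $prioritylist = array(
        array('id' => 1, 'kind' => 2, 'priorityword' => 'off_vocal', 'prioritynum' => 90),
    );
    // The file name matches the kind=2 rule, so the entry gets priority 90.
    var_dump(get_prioritytable($rearchedlist, $prioritylist));
}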
function get_globalipv4(){
    // URL used to look up the global IP (suggestions for a better site are welcome)
$checkglobakurl = 'http://vps.pcgame-r18.jp/ddns/checkip.php';
return file_get_html_with_retry($checkglobakurl);
}
function check_online_available($host,$timeout = 2){
global $config_ini;
if($config_ini["connectinternet"] == 1){
$checkurl = 'http://vps.pcgame-r18.jp/ddns/online_yukari_check.php?host='.$host.'&timeout='.$timeout;
$ret = file_get_html_with_retry($checkurl,2,4);
return $ret;
}
return "now disabled online";
}
function check_access_from_online(){
global $config_ini;
if(array_key_exists("globalhost", $config_ini)) {
if(strpos ($config_ini["globalhost"],$_SERVER["SERVER_NAME"])!==false){
return true;
}
}
return false;
}
function check_json_available_fromurl($url,$timeout = 10){
$jsonbase = file_get_html_with_retry($url,5,$timeout);
$result = json_decode($jsonbase);
if($result === null ) return false;
else return true;
}
// Get only the Everything hit count for a search word
function count_onepriority($word)
{
global $everythinghost;
$jsonurl = 'http://' . $everythinghost . ':81/?search=' . urlencode($word) . '&json=1&count=5';
//print $jsonurl.'<br/>';
$json = file_get_html_with_retry($jsonurl, 5, 30);
$result_array = json_decode($json, true);
return $result_array['totalResults'];
}
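// For reference, a rough sketch of the Everything HTTP/JSON response this helper relies
// on (only the fields read in this file are shown; the real payload depends on the local
// Everything server and on query options such as path_column/size_column):
//   { "totalResults": 123,
//     "results": [ { "name": "...", "path": "...", "size": "..." }, ... ] }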
// Order the priority list by priority and insert a default (priority 50) entry for everything not covered
function orderprioritylist($prioritylist){
array_multisort(array_column($prioritylist, 'prioritynum' ),SORT_DESC,$prioritylist);
$otherstr = "";
foreach($prioritylist as $prioritylistone){
if(empty($otherstr)){
if($prioritylistone["kind"] == 2 ) { // file
$otherstr = '!file:"'.$prioritylistone['priorityword'].'"';
}else {
$otherstr = '!path:"'.$prioritylistone['priorityword'].'"';
}
}else {
if($prioritylistone["kind"] == 2 ) { // file
$otherstr = $otherstr. ' !file:"'.$prioritylistone['priorityword'].'"';
}else {
$otherstr = $otherstr. ' !path:"'.$prioritylistone['priorityword'].'"';
}
}
}
$i = 0;
$c_priority = null;
$newpriorityword = '';
$newprioritylist = array();
foreach($prioritylist as $prioritylistone){
if($prioritylistone['prioritynum'] < 50 ){
break;
}
$i++;
}
if(empty($otherstr)){
return($prioritylist);
}
$ndarray = array( 'id' => 999, 'kind' => 1, 'priorityword' => $otherstr, 'prioritynum' => 50 );
//array_splice($prioritylist, $i, 0, array($ndarray));
$c_priority = null;
$newpriorityword = '';
$newprioritylist = array();
foreach($prioritylist as $prioritylistone){
if($c_priority == $prioritylistone['prioritynum']){
if($prioritylistone["kind"] == 2 ) { // file
$newpriorityword = $newpriorityword.'|file:'.$prioritylistone['priorityword'].'';
}else {
$newpriorityword = $newpriorityword.'|path:'.$prioritylistone['priorityword'].'';
}
}else {
if(!empty($newpriorityword)){
$newprioritylist[] = array( 'prioritynum' => $c_priority, 'priorityword' => '<'.$newpriorityword.'>' );
}
$c_priority = $prioritylistone['prioritynum'];
if($prioritylistone["kind"] == 2 ) { // file
$newpriorityword = 'file:'.$prioritylistone['priorityword'].'';
}else{
$newpriorityword = 'path:'.$prioritylistone['priorityword'].'';
}
}
}
$newprioritylist[] = array( 'prioritynum' => $c_priority, 'priorityword' => '<'.$newpriorityword.'>' );
array_splice($newprioritylist, $i, 0, array($ndarray));
return $newprioritylist;
}
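// Worked sketch of orderprioritylist() on a hypothetical two-entry list:
//   input : [ {kind:2, priorityword:'LIVE',     prioritynum:100},
//             {kind:1, priorityword:'offvocal', prioritynum:10} ]
//   output: [ {prioritynum:100, priorityword:'<file:LIVE>'},
//             {prioritynum:50,  priorityword:'!file:"LIVE" !path:"offvocal"', id:999, kind:1},
//             {prioritynum:10,  priorityword:'<path:offvocal>'} ]
// Entries with equal priority are merged into one <...|...> Everything query, and a
// priority-50 entry that excludes all listed words is inserted for everything else.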
// Search by keyword ordered by priority and fetch $length items starting at item $start
function search_order_priority($word,$start,$length)
{
global $priority_db;
global $everythinghost;
$currentnum = 0;
$pickup_array = array();
$return_array = array();
$totalcount = count_onepriority($word);
$prioritylist = prioritydb_get($priority_db);
$prioritylist=orderprioritylist($prioritylist);
// var_dump($prioritylist);
// die();
$r_length = $length; // remaining number of requested items
$r_start = $start; // start position within the remaining items
$count_p = $start + 1 ; //
$a = 0;
//var_dump($word);
foreach($prioritylist as $prioritylistone){
$kerwords = ''.$word.' '.$prioritylistone['priorityword'];
$pcount = count_onepriority($kerwords); // hit count for this priority
if($pcount <= 0 ){
// print '### non P:'.$prioritylistone['prioritynum'].' W:'.$prioritylistone['priorityword']."\n";
continue;
}
// print '#### P:'.$prioritylistone['prioritynum'].' currentnum:'.$currentnum.' r_start:'.$r_start.' pcount:'.$pcount.' r_length:'.$r_length."\n";
if( ($currentnum <= $r_start ) && ( $currentnum + $pcount ) > $r_start ){
$c_start = $r_start - $currentnum;
if( ($r_start + $r_length) > ($currentnum + $pcount) ){ // the request extends beyond this priority group
$c_length = $currentnum + $pcount - $r_start; // items of this priority from the current position
$r_length = $r_length - $c_length;
$currentnum = $currentnum + $pcount;
$r_start = $currentnum;
}else {
$c_length = $r_length;
$r_length = 0;
}
$order = 'sort=size&ascending=0';
$jsonurl = 'http://' . $everythinghost . ':81/?search=' . urlencode($kerwords) . '&'. $order . '&path=1&path_column=3&size_column=4&case=0&json=1&count=' . $c_length . '&offset=' .$c_start.'';
//print $jsonurl;
$json = file_get_html_with_retry($jsonurl, 5, 30);
$result_array = json_decode($json, true);
// print '### P:'.$prioritylistone['prioritynum'].' W:'.$prioritylistone['priorityword']."\n";
// print '##### P:'.$prioritylistone['prioritynum'].' offset:'.$c_start.' count'.$c_length."\n";
// append a running priority index
$resultslist_withp = array();
foreach($result_array['results'] as $v) {
$resultslist_withp[] = ( $v + array("pcount" => $count_p ) );
$count_p++;
}
$pickup_array = array_merge ($pickup_array,$resultslist_withp);
// var_dump($resultslist_withp);
if($r_length == 0) break;
}else {
$currentnum = $currentnum + $pcount;
}
}
$return_array = array( "totalResults" => $totalcount , "results" => $pickup_array );
return $return_array;
}
// Fetch one page of search results for a search word
function searchlocalfilename_part($kerwords, &$result_array,$start = 0, $length = 10, $order = null, $path = null)
{
global $everythinghost;
global $config_ini;
global $priority_db;
$prioritylist = prioritydb_get($priority_db);
// IPv6check
$askeverythinghost = $everythinghost;
if(array_key_exists("max_filesize", $config_ini)){
if( $config_ini["max_filesize"] > 0 ){
$filesizebyte = $config_ini["max_filesize"] * 1024 * 1024;
$kwlist=preg_split('/[\s|\x{3000}]+/u', $kerwords);
$wordpart = "";
foreach($kwlist as $wd){
if(!empty($wordpart)) {
$wordpart = $wordpart.' ';
}
$wordpart = $wordpart.'path:'.$wd;
}
if( !empty($wordpart)) {
$kerwords = $wordpart.' size:<='.$filesizebyte;
}else {
$kerwords = 'path:'.$kerwords.' size:<='.$filesizebyte;
}
}
}
$orderstr = 'sort=size&ascending=0';
//var_dump($order);
if(empty($prioritylist)){
$orderstr = 'sort=size&ascending=0';
if(empty($order)){
$orderstr = 'sort=size&ascending=0';
}else if($order[0]['column']==3 ){
if($order[0]['dir']=='asc'){
$orderstr='sort=size&ascending=1';
}else {
$orderstr='sort=size&ascending=0';
}
}else if($order[0]['column']==2 ){
if($order[0]['dir']=='asc'){
$orderstr='sort=name&ascending=1';
}else {
$orderstr='sort=name&ascending=0';
}
}else if($order[0]['column']==4 ){
if($order[0]['dir']=='asc'){
$orderstr='sort=path&ascending=1';
}else {
$orderstr='sort=path&ascending=0';
}
}
}else {
if(empty($order)){
$result_array = search_order_priority($kerwords,$start,$length);
return $result_array;
}else if($order[0]['column']==3 ){
if($order[0]['dir']=='asc'){
$orderstr='sort=size&ascending=1';
}else {
$orderstr='sort=size&ascending=0';
}
}else if($order[0]['column']==2 ){
if($order[0]['dir']=='asc'){
$orderstr='sort=name&ascending=1';
}else {
$orderstr='sort=name&ascending=0';
}
}else if($order[0]['column']==4 ){
if($order[0]['dir']=='asc'){
$orderstr='sort=path&ascending=1';
}else {
$orderstr='sort=path&ascending=0';
}
}else {
$result_array = search_order_priority($kerwords,$start,$length);
return $result_array;
}
}
$jsonurl = 'http://' . $everythinghost . ':81/?search=' . urlencode($kerwords) . '&'. $orderstr . '&path=1&path_column=3&size_column=4&case=0&json=1&count=' . $length . '&offset=' .$start.'';
$json = file_get_html_with_retry($jsonurl, 5, 30);
$result_array = json_decode($json, true);
}
// Fetch the full list of search results for a search word
function searchlocalfilename($kerwords, &$result_array,$order = null, $path = null)
{
global $everythinghost;
global $config_ini;
//var_dump($config_ini);
if(empty($order)){
$order = 'sort=size&ascending=0';
}
// IPv6check
$askeverythinghost = $everythinghost;
if(array_key_exists("max_filesize", $config_ini)){
if( $config_ini["max_filesize"] > 0 ){
$filesizebyte = $config_ini["max_filesize"] * 1024 * 1024;
$kerwords = $kerwords.' size:<='.$filesizebyte;
}
}
$jsonurl = "http://" . $askeverythinghost . ":81/?search=" . urlencode($kerwords) . "&". $order . "&path=1&path_column=3&size_column=4&case=0&json=1";
// echo $jsonurl;
$json = file_get_html_with_retry($jsonurl, 5, 30);
// echo $json;
$result_array = json_decode($json, true);
}
// Render the search result list
function printsonglists($result_array, $tableid)
{
global $everythinghost;
global $showsonglengthflag;
$user='normal';
if (isset($_SERVER['PHP_AUTH_USER'])){
if ($_SERVER['PHP_AUTH_USER'] === 'admin'){
// print '管理者ログイン中<br>';
$user=$_SERVER['PHP_AUTH_USER'];
}
}
if($showsonglengthflag == 1 ){
$getID3 = new getID3();
$getID3->setOption(array('encoding' => 'UTF-8'));
}
print "<table id=\"$tableid\" class=\"searchresult\" >";
print "<thead>\n";
print "<tr>\n";
print '<th>No. <font size="-2" class="searchresult_comment">(おすすめ順)</font></th>'."\n";
print "<th>リクエスト </th>\n";
print "<th>ファイル名(プレビューリンク) </th>\n";
print "<th>サイズ </th>\n";
if($showsonglengthflag == 1 ){
print "<th>再生時間 </th>\n";
}
print "<th>パス </th>\n";
print "</tr>\n";
print "</thead>\n";
print "<tbody>\n";
foreach($result_array["results"] as $k=>$v)
{
if($v['size'] <= 1 ) continue;
if($showsonglengthflag == 1 ){
try{
$sjisfilename = addslashes(mb_convert_encoding($v['path'] . "\\" . $v['name'], "cp932", "utf-8"));
//print $sjisfilename."\n";
$music_info = @$getID3->analyze($sjisfilename);
getid3_lib::CopyTagsToComments($music_info);
}catch (Exception $e) {
print $sjisfilename."\n";
}
if(empty($music_info['playtime_string'])){
$length_str = 'Unknown';
}else {
$length_str = $music_info['playtime_string'];
}
}
echo "<tr><td class=\"no\">$k "."</td>";
echo "<td class=\"reqbtn\">";
echo "<form action=\"request_confirm.php\" method=\"post\" >";
echo "<input type=\"hidden\" name=\"filename\" id=\"filename\" value=\"". $v['name'] . "\" />";
echo "<input type=\"hidden\" name=\"fullpath\" id=\"fullpath\" value=\"". $v['path'] . "\\" . $v['name'] . "\" />";
echo "<input type=\"submit\" value=\"リクエスト\" />";
echo "</form>";
echo "</td>";
echo "<td class=\"filename\">";
echo htmlspecialchars($v['name']);
if($user == 'admin' ) {
echo "<br/>おすすめ度 :".$v['priority'];
}
$previewpath = "http://" . $everythinghost . ":81/" . $v['path'] . "/" . $v['name'];
echo "<div Align=\"right\">";
print make_preview_modal($previewpath,$k);
echo "</div>";
// echo "<Div Align=\"right\"><A HREF = \"preview.php?movieurl=" . $previewpath . "\" >";
// echo "プレビュー";
// echo " </A></Div>";
echo "</td>";
echo "<td class=\"filesize\">";
echo formatBytes($v['size']);
echo "</td>";
if($showsonglengthflag == 1 ){
echo "<td class=\"length\">";
echo $length_str;
echo "</td>";
}
echo "<td class=\"filepath\">";
echo htmlspecialchars($v['path']);
echo "</td>";
echo "</tr>";
}
print "</tbody>\n";
echo "</table>";
echo "\n\n";
}
function addpriority($priority_db,$rearchedlist)
{
$prioritylist = prioritydb_get($priority_db);
$rearchedlist_addpriority = array();
// var_dump($rearchedlist["results"]);
foreach($rearchedlist["results"] as $k=>$v){
//print "<br>";
// var_dump($v);
$onefileinfo = array();
$onefileinfo += array('path' => $v['path']);
$onefileinfo += array('name' => $v['name']);
$onefileinfo += array('size' => $v['size']);
if(array_key_exists("pcount", $v)) {
$onefileinfo += array('pcount' => $v['pcount']);
}
$c_priority = -1;
foreach($prioritylist as $pk=>$pv){
$searchres = false;
if($pv['kind'] == 2 ) {
$searchres = mb_strstr($v['name'],$pv['priorityword']);
}else{
$searchres = mb_strstr($v['path'],$pv['priorityword']);
}
if ( $searchres != false ){
if($c_priority < $pv['prioritynum'] ){
$c_priority = $pv['prioritynum'];
}
}
}
if($c_priority == -1) $c_priority = 50;
$onefileinfo += array('priority' => $c_priority);
$rearchedlist_addpriority[] = $onefileinfo ;
//print "<br>";
//var_dump($rearchedlist_addpriority);
}
//print "<br>";
//var_dump($rearchedlist_addpriority);
foreach ($rearchedlist_addpriority as $key => $row) {
//var_dump($key);
//var_dump($row);
$priority_s[$key] = $row['priority'];
$size_s[$key] = $row['size'];
}
return( array( 'totalResults' => $rearchedlist['totalResults'], 'results' => $rearchedlist_addpriority));
}
function sortpriority($priority_db,$rearchedlist)
{
$prioritylist = prioritydb_get($priority_db);
$rearchedlist_addpriority = array();
// var_dump($rearchedlist["results"]);
foreach($rearchedlist["results"] as $k=>$v){
//print "<br>";
//var_dump($v);
$onefileinfo = array();
$onefileinfo += array('path' => $v['path']);
$onefileinfo += array('name' => $v['name']);
$onefileinfo += array('size' => $v['size']);
$c_priority = -1;
foreach($prioritylist as $pk=>$pv){
$searchres = false;
if($pv['kind'] == 2 ) {
$searchres = mb_strstr($v['name'],$pv['priorityword']);
}else{
$searchres = mb_strstr($v['path'],$pv['priorityword']);
}
if ( $searchres != false ){
if($c_priority < $pv['prioritynum'] ){
$c_priority = $pv['prioritynum'];
}
}
}
if($c_priority == -1) $c_priority = 50;
$onefileinfo += array('priority' => $c_priority);
$rearchedlist_addpriority[] = $onefileinfo ;
//print "<br>";
//var_dump($rearchedlist_addpriority);
}
//print "<br>";
//var_dump($rearchedlist_addpriority);
foreach ($rearchedlist_addpriority as $key => $row) {
//var_dump($key);
//var_dump($row);
$priority_s[$key] = $row['priority'];
$size_s[$key] = $row['size'];
}
// Sort by priority, then by size.
array_multisort($priority_s,SORT_DESC,$size_s,SORT_DESC,$rearchedlist_addpriority);
return( array( 'totalResults' => $rearchedlist['totalResults'], 'results' => $rearchedlist_addpriority));
}
// Search by keyword and print the file list
function PrintLocalFileListfromkeyword($word,$order = null, $tableid='searchresult')
{
global $priority_db;
searchlocalfilename($word,$result_a,$order);
echo $result_a["totalResults"]."件<br />";
if( $result_a["totalResults"] >= 1) {
$result_withp = sortpriority($priority_db,$result_a);
printsonglists($result_withp,$tableid);
}
}
function PrintLocalFileListfromkeyword_ajax($word,$order = null, $tableid='searchresult',$bgvmode = 0, $selectid = '')
{
global $priority_db;
// searchlocalfilename($word,$result_a,$order);
searchlocalfilename_part($word,$result_a,0,10,$order);
if(empty($bgvmode)){
$bgvmode = 0;
}
if( $result_a["totalResults"] >= 1) {
// $result_withp = sortpriority($priority_db,$result_a);
// echo $result_a["totalResults"]."件<br />";
// print javascript
//
$printjs = <<<EOD
<script type="text/javascript">
$(document).ready(function(){
var element = document.getElementById( "%s" ) ;
var rect = element.getBoundingClientRect() ;
$('#%s').dataTable({
"processing": true,
"serverSide": true,
"ajax": {
"url": "searchfilefromkeyword_json_part.php",
"type": "POST",
"data": { keyword:"%s", bgvmode:%s, selectid:%s },
"dataType": 'json',
"dataSrc": "data",
},
"drawCallback": function( settings ) {
$("html,body").animate({scrollTop:rect.top},100);
},
"bPaginate" : true,
"lengthMenu": [[50, 10, 100, 1000], [50, 10, 100, 1000]],
"bStateSave" : true,
"stateSaveParams" : function (settings, data) {
data.start = 0;
},
"autoWidth": false,
"columns" : [
{ "data": "no", "className":"no"},
{ "data": "reqbtn", "className":"reqbtn"},
{ "data": "filename", "className":"filename"},
{ "data": "filesize", "className":"filesize"},
{ "data": "filepath", "className":"filepath"},
],
"sDom": '<"H"lrip>t<"F"ip>',
columnDefs: [
{ type: 'currency', targets: [3] },
{ "orderable": false , targets: [1]}
],
}
);
});
</script>
EOD;
if(empty($selectid)){
$selectid = '"none"';
}
echo sprintf($printjs,$tableid,$tableid,addslashes($word),$bgvmode,$selectid);
// print table_base
$printtablebase = <<<EOD
<table id="%s" class="searchresult">
<thead>
<tr>
<th>No. <font size="-2" class="searchresult_comment">(おすすめ順)</font></th>
<th>リクエスト </th>
<th>ファイル名(プレビューリンク) </th>
<th>サイズ </th>
<th>パス </th>
</tr>
</thead>
<tbody>
</tbody>
</table>
EOD;
echo sprintf($printtablebase,$tableid);
}
}
// Return only the number of search hits for a keyword
function searchresultcount_fromkeyword($word)
{
global $priority_db;
searchlocalfilename($word,$result_a);
return $result_a["totalResults"];
}
function selectplayerfromextension($filepath)
{
$extension = pathinfo($filepath, PATHINFO_EXTENSION);
if( strcasecmp($extension,"mp3") == 0
|| strcasecmp($extension,"m4a") == 0
|| strcasecmp($extension,"wav") == 0 ){
$player="foobar";
}else {
$player="mpc";
}
return $player;
}
function getcurrentplayer(){
global $db;
$sql = "SELECT * FROM requesttable WHERE nowplaying = \"再生中\" OR nowplaying = \"再生開始待ち\" ORDER BY reqorder ASC ";
$select = $db->query($sql);
$currentsong = $select->fetchAll(PDO::FETCH_ASSOC);
$select->closeCursor();
//var_dump($currentsong);
if(count($currentsong) == 0){
return "none";
}else{
$player=selectplayerfromextension($currentsong[0]['fullpath']);
}
return $player;
}
function getcurrentid(){
global $db;
$sql = "SELECT * FROM requesttable WHERE nowplaying = \"再生中\" ORDER BY reqorder ASC ";
$select = $db->query($sql);
$currentsong = $select->fetchAll(PDO::FETCH_ASSOC);
$select->closeCursor();
//var_dump($currentsong);
if(count($currentsong) == 0){
return "none";
}else{
$nowid=$currentsong[0]['id'];
}
return $nowid;
}
function countafterplayingitem(){
global $db;
$curid = getcurrentid();
if($curid === 'none') return 0;
$sql = 'SELECT * FROM requesttable WHERE reqorder >= (SELECT reqorder FROM requesttable WHERE id = '.$curid.');' ;
$select = $db->query($sql);
$items = $select->fetchAll(PDO::FETCH_ASSOC);
$select->closeCursor();
//var_dump($currentsong);
return count($items);
}
function selectedcheck($definevalue, $checkvalue){
if(strcmp($definevalue,$checkvalue) == 0) {
return 'selected';
}
return ' ';
}
function print_meta_header(){
print '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />';
print "\n";
print '<meta http-equiv="Content-Style-Type" content="text/css" />';
print "\n";
print '<meta http-equiv="Content-Script-Type" content="text/javascript" />';
print "\n";
print '<meta name="viewport" content="width=device-width,initial-scale=1.0" />';
print "\n";
}
function makesongnamefromfilename($filename){
// Strip 【ニコカラ*】-style (NicoKara) tags
$patstr="\(ニコカラ.*?\)|(ニコカラ.*?)|【ニコカラ.*?】|\[ニコカラ.*?\]";
$repstr="";
$str=mb_ereg_replace($patstr, $repstr, $filename);
// Strip the file extension
$patstr="/(.+)(\.[^.]+$)/";
return preg_replace($patstr, "$1", $str);
}
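// Illustrative example (hypothetical file name):
//   makesongnamefromfilename('【ニコカラ off vocal】Song Title.mp4');  // => 'Song Title'
//   (the 【ニコカラ...】 block and the file extension are stripped)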
function searchwordhistory($word,$filename = 'history.log'){
date_default_timezone_set('Asia/Tokyo');
$fp = fopen($filename, 'a');
$logword = date('r').' '.$word."\r\n";
fwrite($fp,$logword);
fclose($fp);
}
// return singer from IP
function singerfromip($rt)
{
$rt_i = array_reverse($rt);
foreach($rt_i as $row){
if($row['clientip'] === $_SERVER["REMOTE_ADDR"] ) {
if($row['clientua'] === $_SERVER["HTTP_USER_AGENT"] ) {
return $row['singer'];
}
}
}
return " ";
}
function commentpost_v3($nm,$col,$size,$msg,$commenturl)
{
$commentmax=256;
if(mb_strlen($msg) > $commentmax){
$msg = mb_substr($msg,0,$commentmax);
}
$POST_DATA = array(
'nm' => $nm,
'col' => $col,
'sz' => $size,
'msg' => $msg
);
$curl=curl_init(($commenturl));
curl_setopt($curl,CURLOPT_POST, TRUE);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_POSTFIELDS, http_build_query($POST_DATA));
$output= curl_exec($curl);
if($output === false){
return false;
}else{
return true;
}
}
function getallrequest_array(){
global $db;
$sql = "SELECT * FROM requesttable ORDER BY reqorder DESC";
$select = $db->query($sql);
$allrequest = $select->fetchAll(PDO::FETCH_ASSOC);
$select->closeCursor();
return $allrequest;
}
function returnusername($rt){
if(empty($rt)){
return "";
}
$rt_i = array_reverse($rt);
foreach($rt_i as $row){
if($row['clientip'] === $_SERVER["REMOTE_ADDR"] ) {
if($row['clientua'] === $_SERVER["HTTP_USER_AGENT"] ) {
return $row['singer'];
}
}
}
return "";
}
function returnusername_self(){
$allrequest = getallrequest_array ();
return returnusername($allrequest);
}
function shownavigatioinbar($page = 'none', $prefix = '' ){
global $helpurl;
global $user;
global $config_ini;
global $usebingo;
if($page == 'none') {
$page = basename($_SERVER["PHP_SELF"]);
}
print '<nav class="navbar navbar-inverse navbar-fixed-top">';
print <<<EOD
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#gnavi">
<span class="sr-only">メニュー</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
EOD;
if(multiroomenabled()){
// JavaScript used to check whether a room link actually exists
print <<<EOD
<script type="text/javascript">
function createXMLHttpRequest() {
if (window.XMLHttpRequest) {
return new XMLHttpRequest();
} else if (window.ActiveXObject) {
try {
return new ActiveXObject("Msxml2.XMLHTTP");
} catch (e) {
try {
return new ActiveXObject("Microsoft.XMLHTTP");
} catch (e2) {
return null;
}
}
} else {
return null;
}
}
function _delete_element( id_name ){
var dom_obj = document.getElementById(id_name);
var dom_obj_parent = dom_obj.parentNode;
dom_obj_parent.removeChild(dom_obj);
}
function check_yukari_available ( yukarihost , id ) {
var http = new createXMLHttpRequest();;
url = '' + yukarihost + '/check.html';
http.open("GET", url, false);
http.send();
if( http.status != 200 ) {
_delete_element(id);
}
}
</script>
EOD;
print ' <ul class="nav navbar-nav navbar-brand-dropdown">';
print ' <li class="dropdown">';
reset($config_ini["roomurl"]);
$roominfo = each($config_ini["roomurl"]);
print ' <a href="#" class="navbar-brand dropdown-toggle" data-toggle="dropdown" href="">'.$roominfo["key"] .'部屋 <b class="caret"></b></a>';
print ' <ul class="dropdown-menu">';
reset($config_ini["roomurl"]);
while($roominfo = each($config_ini["roomurl"])){
if(!empty($roominfo["value"]) ) {
if(array_key_exists("roomurlshow",$config_ini) && array_key_exists($roominfo["key"],$config_ini["roomurlshow"]) && $config_ini["roomurlshow"][$roominfo["key"]] == 1) {
print ' <li id="'.$roominfo["key"].'room" ><a href="'.urldecode($roominfo["value"]).'">'.$roominfo["key"].'</a></li>'."\n";
}
/**
print '<script type="text/javascript">'."\n";
print '$(document).ready( function(){'."\n";
print 'check_yukari_available(\''.$roominfo["value"].'\', "'.$roominfo["key"].'room" );'."\n";
print '});'."\n";
print '</script>'."\n";
**/
}
}
print ' </ul>';
print ' </li>';
print ' <a class="navbar-brand" href="search.php">検索</a>';
print '</ul>';
}else{
print ' <a class="navbar-brand" href="search.php">検索</a>';
}
print <<<EOD
</div>
EOD;
print '<div id="gnavi" class="collapse navbar-collapse">';
print ' <ul class="nav navbar-nav">';
print ' <li ';
if($page == 'requestlist_only.php')
{
print 'class="active" ';
}
print '><a href="'.$prefix.'requestlist_only.php">予約一覧 </a></li>';
// print ' <li ';
print ' <li class="dropdown "';
if($page == 'searchreserve.php')
{
print 'class="active" ';
}
//selectrequestkind();
// print '><a href="'.$prefix.'searchreserve.php">検索&予約</a></li>';
print '><a href="#" class="dropdown-toggle" data-toggle="dropdown" >いろいろ予約 <b class="caret"></b></a>';
print ' <ul class="dropdown-menu">';
selectrequestkind($kind='dd',$prefix);
print ' </ul>';
print '</li>';
print ' <li ';
if($page == 'playerctrl_portal.php')
{
print 'class="active" ';
}
print '><a href="'.$prefix.'playerctrl_portal.php" >Player</a></li>';
// comment
if(commentenabledcheck()){
print ' <li ';
if($page == 'comment.php')
{
print 'class="active" ';
}
print '><a href="'.$prefix.'comment.php">コメント</a></li>';
}
if ($user === 'admin'){
print ' <p class="navbar-text "> <small>管理者ログイン中</small><br>';
if($page == 'init.php'){
print '<button type="button" class="btn btn-success" onclick="document.allconfig.submit();" >設定反映</button>';
}
print ' </p>';
}
print ' <li class="dropdown navbar-right">';
print ' <a href="#" class="dropdown-toggle" data-toggle="dropdown" href="">Help等 <b class="caret"></b></a>';
print ' <ul class="dropdown-menu">';
if(!empty($helpurl)){
print ' <li><a href="'.$helpurl.'">ヘルプ</a></li>';
}
print ' <li><a href="'.$prefix.'init.php">設定</a></li>';
print ' <li><a href="'.$prefix.'toolinfo.php">接続情報表示</a></li>';
if($usebingo){
print ' <li><a href="'.$prefix.'bingo_showresult.php">ビンゴ結果表示</a></li>';
}
print ' <li ';
if($page == 'request.php')
{
print 'class="active" ';
}
print '><a href="'.$prefix.'request.php">全部</a></li>';
print ' <li class="dropdown-header" > ';
print get_version();
print ' </li>';
print ' </ul>';
print ' </li>';
print ' </ul>';
// print ' <p class="navbar-text navbar-right"> <a href="'.$helpurl.'" class="navbar-link">ヘルプ</a> </p>';
print '</div>';
print '</nav>';
// Change the background color
if(array_key_exists("bgcolor",$config_ini)){
print '<script type="text/javascript" >';
print 'document.body.style.backgroundColor = "'.urldecode($config_ini["bgcolor"]).'";';
print '</script>';
}
}
function shownavigatioinbar_c1($page = 'none'){
shownavigatioinbar($page, '../');
return true;
}
function commentenabledcheck(){
global $config_ini;
if(empty($config_ini['commenturl'])) return false;
if(strcmp($config_ini['commenturl_base'],'notset') == 0 ) return false;
return true;
}
function showmode(){
global $playmode;
print '<div align="center" >';
print '<h4> 現在の動作モード </h4>';
if($playmode == 1){
print ("自動再生開始モード: 自動で次の曲の再生を開始します。");
}elseif ($playmode == 2){
print ("手動再生開始モード: 再生開始を押すと、次の曲が始まります。(歌う人が押してね)");
}elseif ($playmode == 4){
print ("BGMモード: 自動で次の曲の再生を開始します。すべての再生が終わると再生済みの曲をランダムに流します。");
}elseif ($playmode == 5){
print ("BGMモード(ランダムモード): 順番は関係なくリストの中からランダムで再生します。");
}else{
print ("手動プレイリスト登録モード: 機材係が手動でプレイリストに登録しています。");
}
print '</div>';
}
function selectrequestkind($kind='button',$prefix = '', $id='' ){
global $playmode;
global $connectinternet;
global $usenfrequset;
global $config_ini;
if($kind == 'button'){
print <<<EOD
<div align="center" >
<form method="GET" action="search.php" >
EOD;
if(!empty($id)){
print '<input type="hidden" name="selectid" value="'.$id.'" />'."\n";
}
print <<<EOD
<input type="submit" name="曲検索はこちら" value="曲検索はこちら" class="topbtn btn btn-default btn-lg"/>
</form>
</div>
EOD;
}else if($kind == 'dd'){
print ' <li><a href="'.$prefix.'searchreserve.php">検索&予約MENU</a></li>';
print ' <li role="separator" class="divider"></li>';
if( !empty($config_ini["limitlistname"][0]) ){
for($i = 0 ; $i<count($config_ini["limitlistname"]) ; $i++){
if(empty($config_ini["limitlistname"][$i])) continue;
print ' <li><a href="'.$prefix.'limitlist.php?data='.$config_ini["limitlistfile"][$i].'">'.$config_ini["limitlistname"][$i].'</a></li>';
}
print ' <li role="separator" class="divider"></li>';
}
if( $config_ini["usebgv"] == 1 && !empty($config_ini["BGVfolder"]) ){
print ' <li><a href="'.$prefix.'search_bgv.php">BGV選択</a></li>';
}
print ' <li><a href="'.$prefix.'search.php">ファイル検索</a></li>';
}
if ($playmode != 4 && $playmode != 5){
if (configbool("usehaishin", true) ) {
if($kind == 'button'){
print '<div align="center" >';
print '<form method="GET" action="request_confirm.php?shop_karaoke=1" >';
print '<input type="hidden" name="shop_karaoke" value="1" />';
if(!empty($id)){
print '<input type="hidden" name="selectid" value="'.$id.'" />'."\n";
}
print '<input type="submit" name="配信" value="カラオケ配信曲を歌いたい場合はこちらから" class="topbtn btn btn-default btn-lg"/> ';
print '</form>';
print '</div>';
}else if($kind == 'dd'){
print ' <li><a href="'.$prefix.'request_confirm.php?shop_karaoke=1">カラオケ配信</a></li>';
}
}
global $user;
if (configbool("useuserpause", false) || ($user == 'admin' )) {
if($kind == 'button'){
print '<div align="center" >';
print '<form method="GET" action="request_confirm.php?shop_karaoke=1" >';
print '<input type="hidden" name="pause" value="1" />';
if(!empty($id)){
print '<input type="hidden" name="selectid" value="'.$id.'" />'."\n";
}
print '<input type="submit" name="小休止" value="小休止リクエスト" class="topbtn btn btn-default btn-lg"/> ';
print '</form>';
print '</div>';
}else if($kind == 'dd'){
print ' <li><a href="'.$prefix.'request_confirm.php?pause=1">小休止</a></li>';
}
}
}
if (!empty($config_ini["downloadfolder"]) && (check_access_from_online() === false) ){
if($kind == 'button'){
print '<div align="center" >';
print '<form method="GET" action="file_uploader.php" >';
if(!empty($id)){
print '<input type="hidden" name="selectid" value="'.$id.'" />'."\n";
}
print '<input type="submit" name="UPL" value="手元のファイルを転送して予約する場合はこちらから" class="topbtn btn btn-default btn-lg"/> ';
print '</form>';
print '</div>';
}else if($kind == 'dd'){
print ' <li><a href="'.$prefix.'file_uploader.php">ファイル転送</a></li>';
}
}
if( nicofuncenabled() === true){
if($kind == 'button'){
print '<div align="center" >';
print '<form method="GET" action="nicodownload_post.php" >';
if(!empty($id)){
print '<input type="hidden" name="selectid" value="'.$id.'" />'."\n";
}
print '<input type="submit" name="nico" value="ニコニコ動画ダウンロード予約はこちら" class="topbtn btn btn-default btn-lg"/> ';
print '</form>';
print '</div>';
}else if($kind == 'dd'){
print ' <li><a href="'.$prefix.'nicodownload_post.php">ニコニコ動画</a></li>';
}
}
if( $connectinternet == 1){
if($kind == 'button'){
print '<div align="center" >';
print '<form method="GET" action="request_confirm_url.php?shop_karaoke=0" >';
print '<input type="hidden" name="set_directurl" value="1" />';
if(!empty($id)){
print '<input type="hidden" name="selectid" value="'.$id.'" />'."\n";
}
print '<input type="submit" name="URL" value="インターネット直接再生はこちらから(Youtube等)" class="topbtn btn btn-default btn-lg"/> ';
print '</form>';
print '</div>';
}else if($kind == 'dd'){
print ' <li><a href="'.$prefix.'request_confirm_url.php?shop_karaoke=0&set_directurl=1">URL(youtube等)</a></li>';
}
}
if($usenfrequset == 1) {
if($kind == 'button'){
print '<div align="center" >';
print '<form method="GET" action="notfoundrequest/notfoundrequest.php" >';
print '<input type="submit" name="noffoundsong" value="見つからなかった曲があればこちらから教えてください" class="topbtn btn btn-default btn-lg"/>';
print '</form>';
print '</div>';
}else if($kind == 'dd'){
print ' <li role="separator" class="divider"></li>';
print ' <li><a href="'.$prefix.'notfoundrequest/notfoundrequest.php">未発見曲報告</a></li>';
}
}
}
function writeconfig2ini($config_ini,$configfile)
{
$fp = fopen($configfile, 'w');
foreach ($config_ini as $k => $i){
if(is_array($i)){
foreach ($i as $key2 => $item2){
if(!empty($item2) ) {
fputs($fp, $k.'['.$key2.']='.$item2."\n");
}
}
}else {
fputs($fp, "$k=$i\n");
}
}
fclose($fp);
if( $configfile == "config.ini" ){
inieolchange();
iniroomchange($config_ini);
}
}
function multiroomenabled(){
global $config_ini;
$roomcounter = 0;
foreach($config_ini["roomurl"] as $k => $i){
if(!empty($i)){
if(array_key_exists("roomurlshow",$config_ini) && array_key_exists($k,$config_ini["roomurlshow"]) && $config_ini["roomurlshow"][$k] == 1) {
$roomcounter++;
}
}
}
if($roomcounter > 1) return true;
return false;
}
function nicofuncenabled(){
global $config_ini;
global $connectinternet;
if($connectinternet != 1) {
return false;
};
if(array_key_exists("nicoid", $config_ini)) {
$nicologinid = urldecode($config_ini["nicoid"]);
}
if(array_key_exists("nicopass", $config_ini)) {
$nicopass = $config_ini["nicopass"];
}
if(!empty($nicologinid) && !empty($nicopass)) {
return true;
}else {
return false;
}
}
// Convert line endings
function convertEOL($string, $to = "\n")
{
return strtr($string, array(
"\r\n" => $to,
"\r" => $to,
"\n" => $to,
));
}
// Convert line endings in ini.ini
function inieolchange($file = 'ini.ini'){
$fd = fopen($file,'r+');
if($fd === false ){
print "ini.ini open failed";
return;
}
$str = fread($fd,8192);
$str = convertEOL($str,"\r\n");
fseek($fd, 0, SEEK_SET);
fwrite($fd,$str);
fclose($fd);
}
function is_url($text) {
if (preg_match('/^(https?|ftp)(:\/\/[-_.!~*\'()a-zA-Z0-9;\/?:\@&=+\$,%#]+)$/', $text)) {
return TRUE;
} else {
return FALSE;
}
}
function commenturl_mod($commenturl = 'http://'.'localhost'.'/cms/r.php' ){
if(! is_url($commenturl)) {
$commenturl = 'http://'.$_SERVER["SERVER_ADDR"].'/cms/r.php'.$commenturl.' is not commenturl' ;
}
$commenturl = preg_replace('/\/r.*\.php/','',$commenturl);
if( $commenturl === null ){
$commenturl = 'http://'.$_SERVER["SERVER_ADDR"].'/cms/r.php'.'not commenturl';
}
return $commenturl;
}
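// Usage sketch (hypothetical host): for a well-formed comment-server URL the trailing
// /r*.php part is stripped, leaving the base path:
//   commenturl_mod('http://192.168.1.5/cms/r.php');  // => 'http://192.168.1.5/cms'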
// Update the room number in ini.ini
function iniroomchange($config_ini,$file = 'ini.ini'){
$ini_a = array();
$fd = fopen($file,'r+');
if($fd === false ){
print "ini.ini open failed";
return;
}
while (($buffer = fgets($fd, 4096)) !== false) {
$ini_a[] = trim($buffer);
}
if (!feof($fd)) {
echo "Error: unexpected fgets() fail\n";
fclose($fd);
return;
}
$ini_a[0] = $config_ini["commentroom"];
$ini_a[2] = commenturl_mod(urldecode($config_ini["commenturl_base"]));
fseek($fd, 0, SEEK_SET);
$writebyte = 0;
foreach($ini_a as $oneline){
$res = fwrite($fd,$oneline."\r\n");
$writebyte = $writebyte + $res;
}
ftruncate($fd,$writebyte);
fclose($fd);
}
function get_git_version(){
global $config_ini;
$result_str = null;
if(array_key_exists("gitcommandpath", $config_ini)){
$gitcmd = urldecode($config_ini["gitcommandpath"]);
if(file_exists($gitcmd)){
$execcmd = $gitcmd.' describe --tags';
$result_str = exec($execcmd);
if( mb_substr($result_str ,0 ,1) === 'v' ){
if(is_numeric( mb_substr($result_str ,1 ,1))){
$git_version = $result_str;
}
}
}
}
return $result_str;
}
// Version information
function get_version(){
$localversion = '';
if(file_exists('version')){
$localversion = file_get_contents('version');
}
$gitversion = get_git_version();
if(empty($gitversion)){
return $localversion;
}else {
return $gitversion;
}
}
function get_gittaglist(&$errmsg = 'none'){
global $config_ini;
$taglist = array();
$errorcnt = 0;
if(array_key_exists("gitcommandpath", $config_ini)){
$gitcmd = urldecode($config_ini["gitcommandpath"]);
if(file_exists($gitcmd)){
$execcmd = $gitcmd.' config --global core.autoCRLF false';
exec($execcmd);
$execcmd = $gitcmd.' fetch origin';
set_time_limit (900);
exec($execcmd,$result_str);
foreach($result_str as $line){
$err_str_pos = mb_strpos($line, "unable to access");
if( $err_str_pos !== false ) {
$errmsg .= "network access failed";
$errorcnt ++;
}else if (mb_strstr($line, "fatal") !== false) {
$errmsg .= "fetch unknown error: $line";
$errorcnt ++;
}
}
if($errorcnt > 0){
return $taglist;
}
$execcmd = $gitcmd.' tag';
exec($execcmd, $result_str);
foreach($result_str as $line){
if( mb_substr($line ,0 ,1) === 'v' ){
if(is_numeric( mb_substr($line ,1 ,1))){
$taglist[] = $line;
}
}
}
}
}
return $taglist;
}
// memo
// cd c:\xampp\htdocs
// gitcmd\cmd\git config --global core.autoCRLF false
// gitcmd\cmd\git fetch origin
// gitcmd\cmd\git reset --hard origin/master
function update_fromgit($version_str, &$errmsg){
global $config_ini;
$taglist = array();
$errorcnt = 0;
if(array_key_exists("gitcommandpath", $config_ini)){
$gitcmd = urldecode($config_ini["gitcommandpath"]);
if(file_exists($gitcmd)){
$execcmd = $gitcmd.' config --global core.autoCRLF false';
exec($execcmd);
$execcmd = $gitcmd.' fetch origin';
set_time_limit (900);
exec($execcmd,$result_str);
foreach($result_str as $line){
$err_str_pos = mb_strpos($line, "unable to access");
if( $err_str_pos !== false ) {
$errmsg .= "network access failed";
$errorcnt ++;
}else if (mb_strstr($line, "fatal") !== false) {
$errmsg .= "fetch unknown error: $line";
$errorcnt ++;
}
}
if($errorcnt > 1){
return false;
}
$execcmd = $gitcmd.' reset --hard '.$version_str;
exec($execcmd,$result_str);
foreach($result_str as $line){
$err_str_pos = mb_strpos($line, "unknown revision");
if( $err_str_pos !== false) {
$errmsg .= "no version : $version_str";
$errorcnt ++;
}else if (mb_strstr($line, "fatal") !== false) {
$errmsg .= "reset --hard unknown error: $line";
$errorcnt ++;
}
}
}
}
if($errorcnt > 0) {
return false;
}
return true;
}
function make_preview_modal($filepath, $modalid) {
global $everythinghost;
// print $filepath;
$dlpathinfo = pathinfo($filepath);
if(array_key_exists('extension',$dlpathinfo)){
$filetype = '';
if($dlpathinfo['extension'] === 'mp4'){
$filetype = ' type="video/mp4"';
}else if($dlpathinfo['extension'] === 'flv'){
$filetype = ' type="video/x-flv"';
}else if($dlpathinfo['extension'] === 'avi'){
$filetype = ' type="video/x-msvideo"';
return null;
}else {
return null;
return "この動画形式はプレビューできません";
}
}else {
return null;
}
$previewpath[] = "http://" . $everythinghost . ":81/" . urlencode($filepath);
$filepath_url = str_replace('\\', '/', $filepath);
$previewpath[] = "http://" . $everythinghost . ":81/" . ($filepath_url);
$button='<a href="#" data-toggle="modal" class="previewmodallink btn btn-default" data-target="#'.$modalid.'" > プレビュー </a>';
$previewsource = "";
foreach($previewpath as $previewurl ){
$previewsource = $previewsource.'<source src="'.$previewurl.'" '.$filetype.' />';
}
$modaljs='<script>
$(function () {
$(\'#'.$modalid.'\').on(\'hidden.bs.modal\', function (event) {
var myPlayer = videojs("preview_video_'.$modalid.'a");
myPlayer.pause();
});
});</script>';
$modaldg='<!-- 2.モーダルの配置 -->'.
'<div class="modal" id="'.$modalid.'" tabindex="-1">'.
' <div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal">
<span aria-hidden="true">×</span>
</button>
<h4 class="modal-title" id="modal-label">動画プレビュー</h4>
</div>
<div class="modal-body">
<video id="preview_video_'.$modalid.'a" class="video-js vjs-default-skin" controls muted preload="none" data-setup="{}" style="width: 320px; height: 180px;" >'.$previewsource.'
</video>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">閉じる</button>
</div>
</div>
</div>
</div>';
return $button."\n".$modaljs.$modaldg;
}
function basename_jp($path){
$p_info = explode('\\', $path);
return end($p_info);
}
/*
* Convert <br> tags to newline characters
*/
function br2nl($string)
{
// case-insensitive
return preg_replace('/<br[[:space:]]*\/?[[:space:]]*>/i', "\n", $string);
}
function configbool($keyword, $defaultvalue){
global $config_ini;
$retval = null;
if(array_key_exists($keyword,$config_ini ) ){
if( $config_ini[$keyword] == 1 ) {
$retval = true;
}else {
$retval = false;
}
}else {
$retval = $defaultvalue;
}
return $retval;
}
function checkbox_check($arr,$word){
$res = 0;
foreach($arr as $value){
if($value === $word) {
$res = 1;
}
}
return $res;
}
function getphpversion(){
if (!defined('PHP_VERSION_ID')) {
$version = explode('.', PHP_VERSION);
define('PHP_VERSION_ID', ($version[0] * 10000 + $version[1] * 100 + $version[2]));
}
return PHP_VERSION_ID;
}
function file_exist_check_japanese_cf($filename){
$filename_check = $filename;
if(getphpversion() < 70100 ){
setlocale(LC_CTYPE, 'Japanese_Japan.932');
$filename_check =addslashes($filename);
}
$fileinfo = @fopen($filename_check,'r');
if($fileinfo != FALSE){
fclose($fileinfo);
// logtocmd 'DEBUG : Success fopen' ;
return TRUE;
}
return FALSE;
}
function fileexistcheck($filebasename){
// Search the NicokaraLister DB
global $config_ini;
$lister_dbpath='';
if (file_exist_check_japanese_cf(urldecode($config_ini['listerDBPATH'])) ){
$lister_dbpath=urldecode($config_ini['listerDBPATH']);
}
require_once('function_search_listerdb.php');
if(!empty($lister_dbpath) ){
// Initialize the DB
$lister = new ListerDB();
$lister->listerdbfile = $lister_dbpath;
$listerdb = $lister->initdb();
if( $listerdb ) {
$select_where = ' WHERE found_path LIKE ' . $listerdb->quote('%'.$filebasename.'%');
$sql = 'select * from t_found '. $select_where.';';
$alldbdata = $lister->select($sql);
if($alldbdata){
$filepath_utf8 = $alldbdata[0]['found_path'];
return $filepath_utf8;
}
}
}
// Search with Everything
$jsonurl = "http://" . "localhost" . ":81/?search=" . urlencode($filebasename) . "&sort=size&ascending=0&path=1&path_column=3&size_column=4&json=1";
$json = file_get_html_with_retry($jsonurl, 5);
if($json != false){
$decode = json_decode($json, true);
if($decode != NULL && isset($decode['results'][0])){
if(array_key_exists('path',$decode['results'][0]) && array_key_exists('name',$decode['results'][0])){
$filepath_utf8 = $decode['results'][0]['path'] . "\\" . $decode['results'][0]['name'];
return $filepath_utf8;
}
}
}
return false;
}
function get_fullfilename2($l_fullpath,$word,&$filepath_utf8){
$filepath_utf8 = "";
// Argument check
if(empty($l_fullpath) && empty($word) ) return "";
// Check the file name
// logtocmd ("Debug l_fullpath: $l_fullpath\r\n");
global $config_ini;
$lister_dbpath='';
if (file_exist_check_japanese_cf(urldecode($config_ini['listerDBPATH'])) ){
$lister_dbpath=urldecode($config_ini['listerDBPATH']);
}
$winfillpath = mb_convert_encoding($l_fullpath,"SJIS-win");
$fileinfo=file_exist_check_japanese_cf($winfillpath);
// logtocmd ("Debug#".$fileinfo);
if($fileinfo !== FALSE){
$filepath = $winfillpath;
$filepath_utf8=$l_fullpath;
}else{
$filepath = null;
// First retry the search with the basename taken from the full path
$songbasename = basename($l_fullpath);
// Search the NicokaraLister DB
if(!empty($lister_dbpath) ){
logtocmd ("fullpass file $l_fullpath is not found. Search from NicokaraLister DB.: $songbasename\r\n");
require_once('function_search_listerdb.php');
// Initialize the DB
$lister = new ListerDB();
$lister->listerdbfile = $lister_dbpath;
$listerdb = $lister->initdb();
if( $listerdb ) {
$select_where = ' WHERE found_path LIKE ' . $listerdb->quote('%'.$songbasename.'%');
$sql = 'select * from t_found '. $select_where.';';
$alldbdata = $lister->select($sql);
if($alldbdata){
$filepath_utf8 = $alldbdata[0]['found_path'];
$filepath = mb_convert_encoding($filepath_utf8,"cp932","UTF-8");
logtocmd ($songbasename.'代わりに「'.$filepath_utf8.'」を再生します'."\n");
return $filepath;
}
// Retry the search with the song title
$select_where = ' WHERE found_path LIKE ' . $listerdb->quote('%'.$word.'%');
$sql = 'select * from t_found '. $select_where.';';
$alldbdata = $lister->select($sql);
if($alldbdata){
$filepath_utf8 = $alldbdata[0]['found_path'];
$filepath = mb_convert_encoding($filepath_utf8,"cp932","UTF-8");
logtocmd ($word.'代わりに「'.$filepath_utf8.'」を再生します'."\n");
return $filepath;
}
}
}
// Search with Everything
// logtocmd ("fullpass file $winfillpath is not found. Search from Everything DB.: $songbasename\r\n");
$jsonurl = "http://" . "localhost" . ":81/?search=" . urlencode($songbasename) . "&sort=size&ascending=0&path=1&path_column=3&size_column=4&json=1";
$json = file_get_html_with_retry($jsonurl, 5);
if($json != false){
$decode = json_decode($json, true);
if($decode != NULL && isset($decode['results'][0])){
if(array_key_exists('path',$decode['results'][0]) && array_key_exists('name',$decode['results'][0])){
$filepath_utf8 = $decode['results'][0]['path'] . "\\" . $decode['results'][0]['name'];
$filepath = mb_convert_encoding($filepath_utf8,"cp932","UTF-8");
}
}
}
if(empty($filepath)){
// Retry the search with the song title
logtocmd ("fullpass basename $songbasename is not found. Search from Everything DB.: $word\r\n");
$jsonurl = "http://" . "localhost" . ":81/?search=" . urlencode($word) . "&sort=size&ascending=0&path=1&path_column=3&size_column=4&json=1";
// logtocmd_cf $jsonurl;
$json = file_get_html_with_retry($jsonurl, 5);
$decode = json_decode($json, true);
if( !isset($decode['results'][0]['name']) ) return false;
$filepath = $decode['results'][0]['path'] . "\\" . $decode['results'][0]['name'];
$filepath_utf8= $filepath;
$filepath = mb_convert_encoding($filepath,"cp932");
logtocmd ('代わりに「'.$filepath_utf8.'」を再生します'."\n");
}
}
return $filepath;
}
function logtocmd_cf($msg){
//print(mb_convert_encoding("$msg\n","SJIS-win"));
error_log($msg."\n", 3, 'ykrdebug.log');
}
?>
| 30.679692 | 204 | 0.544476 |
2fd55a9541f3cdf811e93acd233be903776f8c4b | 2,268 | py | Python | spNQueenTheorem/generator.py | lightofanima/Adventures | efb9d001520ff0b88746d8b3cf024de3307e45c7 | [
"MIT"
] | null | null | null | spNQueenTheorem/generator.py | lightofanima/Adventures | efb9d001520ff0b88746d8b3cf024de3307e45c7 | [
"MIT"
] | null | null | null | spNQueenTheorem/generator.py | lightofanima/Adventures | efb9d001520ff0b88746d8b3cf024de3307e45c7 | [
"MIT"
] | null | null | null | # This script generates a theorem for the Z3 SAT solver. The output of this program
# is designed to be the input for http://rise4fun.com/z3
# The output of z3 is the coordinates of the queens for a solution to the N Queen problem :)
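# For illustration, the emitted SMT-LIB looks roughly like:
#   ( declare-const P0_x Int )
#   ( assert ( >= P0_x 0 ) )
#   ...
#   (check-sat)
#   (get-model)
# Besides pasting it into rise4fun, the output can also be fed to a local z3 binary
# (assuming z3 is installed and on the PATH):
#   python generator.py | z3 -in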
#Prints an assert statement
def zassert(x):
    print("( assert ( {} ) )".format(x))
#Prints a declaration
def zdeclare(x, type="Int"):
    print("( declare-const {} {} )".format(x,type))
#Generates a Z3 proof.
# N = number of queens.
# G = grid size (8 = chess board)
def generate(N, G):
    zdeclare("N") #Number of queens
    zdeclare("G") #Board size
    zassert("= N {}".format(N)) #Init N
    zassert("= G {}".format(G)) #Init G
    #Generate queen names
    queensX = ["P{}_x".format(n) for n in range(0, N) ]
    queensY = ["P{}_y".format(n) for n in range(0, N) ]
    #Declare queens
    for i in range(N):
        zdeclare(queensX[i])
        zdeclare(queensY[i])
    #For each queen Position
    for P in range(N):
        #Assert bounds
        zassert(">= {} 0".format(queensX[P]))
        zassert(">= {} 0".format(queensY[P]))
        zassert("< {} G".format(queensX[P]))
        zassert("< {} G".format(queensY[P]))
        for PP in range(P+1, N):
            #Assert Horizontal and Vertical Uniqueness
            zassert("not ( or (= {ax} {bx} ) (= {ay} {by} ) )"
                .format(ax=queensX[P], bx=queensX[PP], ay=queensY[P], by=queensY[PP]))
            #Assert Diagonal uniqueness
            # / angle
            zassert("not ( exists (( t Int )) ( and ( and ( and ( = (+ {ax} t) {bx} ) ( >= (+ {ax} t) 0 ) ) ( < (+ {ax} t) G ) ) ( and ( and ( = (+ {ay} t) {by} ) ( >= (+ {ay} t) 0 ) ) ( < (+ {ay} t) G ) ) ) )"
                .format(ax=queensX[P], bx=queensX[PP], ay=queensY[P], by=queensY[PP]))
            # \ angle
            zassert("not ( exists (( t Int )) ( and ( and ( and ( = (+ {ax} t) {bx} ) ( >= (+ {ax} t) 0 ) ) ( < (+ {ax} t) G ) ) ( and ( and ( = (- {ay} t) {by} ) ( >= (- {ay} t) 0 ) ) ( < (- {ay} t) G ) ) ) )"
                .format(ax=queensX[P], bx=queensX[PP], ay=queensY[P], by=queensY[PP]))
    print("(check-sat)")
    print("(get-model)")
#Generate proof for 8 queens on an 8x8 grid
generate(8,8) | 36 | 211 | 0.507055 |
1f00680a4b855af02642eac0a7e207be5c37a587 | 8,438 | cs | C# | src/Compilers/Core/Portable/Symbols/PlatformInvokeInformation.cs | pottereric/roslyn | 4bab250a3730d81264524e65befddb5c5d84e059 | [
"Apache-2.0"
] | 1 | 2021-04-18T03:29:16.000Z | 2021-04-18T03:29:16.000Z | src/Compilers/Core/Portable/Symbols/PlatformInvokeInformation.cs | pottereric/roslyn | 4bab250a3730d81264524e65befddb5c5d84e059 | [
"Apache-2.0"
] | null | null | null | src/Compilers/Core/Portable/Symbols/PlatformInvokeInformation.cs | pottereric/roslyn | 4bab250a3730d81264524e65befddb5c5d84e059 | [
"Apache-2.0"
] | null | null | null | // Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Runtime.InteropServices;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
using Cci = Microsoft.Cci;
namespace Microsoft.CodeAnalysis
{
/// <summary>
/// Information that describes how a method from the underlying Platform is to be invoked.
/// </summary>
public sealed class DllImportData : Cci.IPlatformInvokeInformation
{
private readonly string moduleName;
private readonly string entryPointName; // null if unspecified, the name of the target method should be used
private readonly Cci.PInvokeAttributes flags;
internal DllImportData(string moduleName, string entryPointName, Cci.PInvokeAttributes flags)
{
this.moduleName = moduleName;
this.entryPointName = entryPointName;
this.flags = flags;
}
/// <summary>
/// Module name. Null if value specified in the attribute is not valid.
/// </summary>
public string ModuleName
{
get { return moduleName; }
}
/// <summary>
/// Name of the native entry point or null if not specified (the effective name is the same as the name of the target method).
/// </summary>
public string EntryPointName
{
get { return entryPointName; }
}
Cci.PInvokeAttributes Cci.IPlatformInvokeInformation.Flags
{
get { return flags; }
}
/// <summary>
/// Controls whether the <see cref="CharacterSet"/> field causes the common language runtime
/// to search an unmanaged DLL for entry-point names other than the one specified.
/// </summary>
public bool ExactSpelling
{
get
{
return (flags & Cci.PInvokeAttributes.NoMangle) != 0;
}
}
/// <summary>
/// Indicates how to marshal string parameters and controls name mangling.
/// </summary>
public CharSet CharacterSet
{
get
{
switch (flags & Cci.PInvokeAttributes.CharSetMask)
{
case Cci.PInvokeAttributes.CharSetAnsi:
return CharSet.Ansi;
case Cci.PInvokeAttributes.CharSetUnicode:
return CharSet.Unicode;
case Cci.PInvokeAttributes.CharSetAuto:
return Cci.Constants.CharSet_Auto;
case 0:
return Cci.Constants.CharSet_None;
}
throw ExceptionUtilities.UnexpectedValue(flags);
}
}
/// <summary>
/// Indicates whether the callee calls the SetLastError Win32 API function before returning from the attributed method.
/// </summary>
public bool SetLastError
{
get
{
return (flags & Cci.PInvokeAttributes.SupportsLastError) != 0;
}
}
/// <summary>
/// Indicates the calling convention of an entry point.
/// </summary>
public CallingConvention CallingConvention
{
get
{
switch (flags & Cci.PInvokeAttributes.CallConvMask)
{
default:
case Cci.PInvokeAttributes.CallConvWinapi:
return CallingConvention.Winapi;
case Cci.PInvokeAttributes.CallConvCdecl:
return CallingConvention.Cdecl;
case Cci.PInvokeAttributes.CallConvStdcall:
return CallingConvention.StdCall;
case Cci.PInvokeAttributes.CallConvThiscall:
return CallingConvention.ThisCall;
case Cci.PInvokeAttributes.CallConvFastcall:
return Cci.Constants.CallingConvention_FastCall;
}
}
}
/// <summary>
/// Enables or disables best-fit mapping behavior when converting Unicode characters to ANSI characters.
/// Null if not specified (the setting for the containing type or assembly should be used, <see cref="BestFitMappingAttribute"/>).
/// </summary>
public bool? BestFitMapping
{
get
{
switch (flags & Cci.PInvokeAttributes.BestFitMask)
{
case Cci.PInvokeAttributes.BestFitEnabled:
return true;
case Cci.PInvokeAttributes.BestFitDisabled:
return false;
default:
return null;
}
}
}
/// <summary>
/// Enables or disables the throwing of an exception on an unmappable Unicode character that is converted to an ANSI "?" character.
/// Null if not specified.
/// </summary>
public bool? ThrowOnUnmappableCharacter
{
get
{
switch (flags & Cci.PInvokeAttributes.ThrowOnUnmappableCharMask)
{
case Cci.PInvokeAttributes.ThrowOnUnmappableCharEnabled:
return true;
case Cci.PInvokeAttributes.ThrowOnUnmappableCharDisabled:
return false;
default:
return null;
}
}
}
internal static Cci.PInvokeAttributes MakeFlags(bool noMangle, CharSet charSet, bool setLastError, CallingConvention callingConvention, bool? useBestFit, bool? throwOnUnmappable)
{
Cci.PInvokeAttributes result = 0;
if (noMangle)
{
result |= Cci.PInvokeAttributes.NoMangle;
}
switch (charSet)
{
default: // Dev10: use default without reporting an error
case Cci.Constants.CharSet_None:
break;
case CharSet.Ansi:
result |= Cci.PInvokeAttributes.CharSetAnsi;
break;
case CharSet.Unicode:
result |= Cci.PInvokeAttributes.CharSetUnicode;
break;
case Cci.Constants.CharSet_Auto:
result |= Cci.PInvokeAttributes.CharSetAuto;
break;
}
if (setLastError)
{
result |= Cci.PInvokeAttributes.SupportsLastError;
}
switch (callingConvention)
{
default: // Dev10: uses default without reporting an error
case CallingConvention.Winapi:
result |= Cci.PInvokeAttributes.CallConvWinapi;
break;
case CallingConvention.Cdecl:
result |= Cci.PInvokeAttributes.CallConvCdecl;
break;
case CallingConvention.StdCall:
result |= Cci.PInvokeAttributes.CallConvStdcall;
break;
case CallingConvention.ThisCall:
result |= Cci.PInvokeAttributes.CallConvThiscall;
break;
case Cci.Constants.CallingConvention_FastCall:
result |= Cci.PInvokeAttributes.CallConvFastcall;
break;
}
if (throwOnUnmappable.HasValue)
{
if (throwOnUnmappable.Value)
{
result |= Cci.PInvokeAttributes.ThrowOnUnmappableCharEnabled;
}
else
{
result |= Cci.PInvokeAttributes.ThrowOnUnmappableCharDisabled;
}
}
if (useBestFit.HasValue)
{
if (useBestFit.Value)
{
result |= Cci.PInvokeAttributes.BestFitEnabled;
}
else
{
result |= Cci.PInvokeAttributes.BestFitDisabled;
}
}
return result;
}
}
}
| 33.484127 | 186 | 0.524176 |
b45b17206a4feda670a47fe421b40131ad6858c5 | 76 | sql | SQL | hackerrank/sql/regularExpressions/weather-observation-station-9.sql | civilian/competitive_programing | a6ae7ad0db84240667c1dd6231c51c586ba040c7 | [
"MIT"
] | 1 | 2016-02-11T21:28:22.000Z | 2016-02-11T21:28:22.000Z | hackerrank/sql/regularExpressions/weather-observation-station-9.sql | civilian/competitive_programing | a6ae7ad0db84240667c1dd6231c51c586ba040c7 | [
"MIT"
] | null | null | null | hackerrank/sql/regularExpressions/weather-observation-station-9.sql | civilian/competitive_programing | a6ae7ad0db84240667c1dd6231c51c586ba040c7 | [
"MIT"
] | null | null | null | SELECT DISTINCT city
FROM station
WHERE city RLIKE '^(?![aeiouAEIOU]).*$' | 25.333333 | 40 | 0.697368 |
43c0f9e0722c4164bea18a164420e88131585e06 | 2,091 | sql | SQL | models/staging/edfi/stg_edfi_survey_responses.sql | K12-Analytics-Engineering/dbt | e7863556a148979729855743666737e74ba650d2 | [
"Apache-2.0"
] | 1 | 2022-03-21T13:50:43.000Z | 2022-03-21T13:50:43.000Z | models/staging/edfi/stg_edfi_survey_responses.sql | K12-Analytics-Engineering/dbt | e7863556a148979729855743666737e74ba650d2 | [
"Apache-2.0"
] | null | null | null | models/staging/edfi/stg_edfi_survey_responses.sql | K12-Analytics-Engineering/dbt | e7863556a148979729855743666737e74ba650d2 | [
"Apache-2.0"
] | null | null | null | select NULL as column1
{#
with parsed_data as (
select
date_extracted as date_extracted,
school_year as school_year,
json_value(data, '$.id') as id,
json_value(data, '$.surveyResponseIdentifier') as survey_response_identifier,
struct(
json_value(data, '$.parentReference.parentUniqueId') as parent_unique_id
) as parent_reference,
struct(
json_value(data, '$.staffReference.staffUniqueId') as staff_unique_id
) as staff_reference,
struct(
json_value(data, '$.studentReference.studentUniqueId') as student_unique_id
) as student_reference,
struct(
json_value(data, '$.surveyReference.namespace') as namespace,
json_value(data, '$.surveyReference.surveyIdentifier') as survey_identifier
) as survey_reference,
json_value(data, '$.electronicMailAddress') as electronic_mail_address,
json_value(data, '$.fullName') as full_name,
json_value(data, '$.location') as location,
parse_date('%Y-%m-%d', json_value(data, "$.responseDate")) as response_date,
cast(json_value(data, "$.responseTime") as int64) as response_time,
array(
select as struct
split(json_value(survey_levels, '$.surveyLevelDescriptor'), '#')[OFFSET(1)] as survey_level_descriptor
from unnest(json_query_array(data, "$.surveyLevels")) survey_levels
) as survey_levels
from {{ source('staging', 'base_edfi_survey_responses') }}
where date_extracted >= (
select max(date_extracted) as date_extracted
from {{ source('staging', 'base_edfi_survey_responses') }}
where is_complete_extract is true)
qualify row_number() over (
partition by id
order by date_extracted DESC) = 1
)
select *
from parsed_data
where
id not in (
select id from {{ ref('stg_edfi_deletes') }} edfi_deletes
where parsed_data.school_year = edfi_deletes.school_year) #}
| 41 | 118 | 0.638451 |
4a306eb83d05376c694f57c5e0e9bec82420aaa2 | 656 | swift | Swift | Sources/SwiftYFinance/Classes/DataTypes/RecentStockData.swift | thealpa/SwiftYFinance | 412d3126cbf732fe05584a2952350f63ce5f59dd | [
"MIT"
] | 21 | 2020-08-12T11:11:15.000Z | 2022-02-20T19:38:27.000Z | SwiftYFinance/Classes/DataTypes/RecentStockData.swift | daimajia/SwiftYFinance | e7a32c62d9e6c0d566c04782d98b213f56d6b035 | [
"MIT"
] | 2 | 2020-08-24T09:19:54.000Z | 2021-07-27T10:41:54.000Z | SwiftYFinance/Classes/DataTypes/RecentStockData.swift | daimajia/SwiftYFinance | e7a32c62d9e6c0d566c04782d98b213f56d6b035 | [
"MIT"
] | 5 | 2021-01-07T03:48:05.000Z | 2021-12-17T23:38:30.000Z | //
// RecentStockData.swift
// SwiftYFinance
//
// Created by Александр Дремов on 12.08.2020.
//
import Foundation
public struct RecentStockData{
public var currency: String?
public var symbol: String?
public var exchangeName: String?
public var instrumentType: String?
public var firstTradeDate: Int?
public var regularMarketTime: Int?
public var gmtoffset: Int?
public var timezone: String?
public var exchangeTimezoneName: String?
public var regularMarketPrice: Float?
public var chartPreviousClose: Float?
public var previousClose: Float?
public var scale: Int?
public var priceHint: Int?
}
| 25.230769 | 46 | 0.721037 |
996a578e4886c8a08006904631008011afcdf2fb | 742 | rs | Rust | src/commands/owner/log.rs | Mishio595/momiji-rust | c9208c85fc47f9b09ffe9b77bd87f7769b04383a | [
"MIT"
] | 4 | 2018-05-31T18:41:05.000Z | 2020-08-26T16:34:19.000Z | src/commands/owner/log.rs | Mishio595/momiji-rust | c9208c85fc47f9b09ffe9b77bd87f7769b04383a | [
"MIT"
] | null | null | null | src/commands/owner/log.rs | Mishio595/momiji-rust | c9208c85fc47f9b09ffe9b77bd87f7769b04383a | [
"MIT"
] | null | null | null | // use momiji::core::consts::*;
use momiji::Context;
use momiji::framework::args::Args;
use momiji::framework::command::{Command, Options};
use twilight_model::channel::Message;
use std::sync::Arc;
use std::error::Error;
pub struct Log;
#[async_trait]
impl Command for Log {
fn options(&self) -> Arc<Options> {
let options = Options {
owner_only: true,
help_available: false,
..Options::default()
};
Arc::new(options)
}
async fn run(&self, message: Message, _: Args, ctx: Context) -> Result<(), Box<dyn Error + Send + Sync>> {
ctx.http.create_message(message.channel_id).reply(message.id).content("Command not yet implemented")?.await?;
Ok(())
}
} | 28.538462 | 117 | 0.617251 |